% Step 1: load the ORL face database (40 subjects x 10 images, 112x92 bmp),
% storing each image as one column vector of Image(:, subject, shot).
clear;                           % 'clear' suffices; 'clear all' also flushes
                                 % cached functions/breakpoints for no benefit
iNum_Train_PerSample = 5;        % training images per subject
iNum_Test_PerSample = 5;         % test images per subject
iHeight = 112;                   % image height in pixels
iWidth = 92;                     % image width in pixels
iClass = 40;                     % number of subjects (classes)
iNum_PerSample = 10;             % images available per subject
iDim_Image = iHeight * iWidth;   % length of one vectorized image
Image = zeros(iDim_Image, iClass, iNum_PerSample);
fprintf('********** Step 1 Read Images **********\n');
for i = 1 : iClass
    for j = 1 : iNum_PerSample
        fcount = (i - 1) * 10 + j;   % global image index, 1..400
        % Zero-pad the index to three digits (001..400).  The original
        % hand-rolled padding used int2str(j) in the fcount < 10 branch,
        % which only happened to work because fcount < 10 implies i == 1
        % (so j == fcount).  sprintf makes the intent explicit.
        filestr = sprintf('%03d', fcount);
        % NOTE(review): assumes files named orl001..orl400 in this folder.
        str_Path_Image = ['D:\人脸库\ORL\orl' filestr];
        B = imread(str_Path_Image, 'bmp');
        Image(:, i, j) = reshape(B, [iDim_Image, 1]);
    end
end
% Randomly split the 10 shots of every subject into train / test halves.
% One permutation of shot indices is drawn and reused for all subjects.
rand_num = randperm(iNum_PerSample);
train = rand_num(1:iNum_Train_PerSample);
test = rand_num(iNum_Train_PerSample + 1:iNum_PerSample);
% Assemble the training and test sample matrices: one image per column,
% columns grouped consecutively by class.
iNum_Total_Train = iNum_Train_PerSample * iClass;
iNum_Total_Test = iNum_Test_PerSample * iClass;
M_Sample_Train = zeros(iDim_Image, iNum_Total_Train);
M_Sample_Test = zeros(iDim_Image, iNum_Total_Test);
for c = 1 : iClass
    trainCols = (c - 1) * iNum_Train_PerSample + (1 : iNum_Train_PerSample);
    testCols  = (c - 1) * iNum_Test_PerSample  + (1 : iNum_Test_PerSample);
    M_Sample_Train(:, trainCols) = squeeze(Image(:, c, train));
    M_Sample_Test(:, testCols)   = squeeze(Image(:, c, test));
end
fprintf('********** Step2 PCA **********\n');
% Mean face of the training set (replaces the manual accumulation loop).
M_Mean_Train = mean(M_Sample_Train, 2);
% Center the training samples about the mean face.
M_Center_Train = M_Sample_Train - repmat(M_Mean_Train, 1, iNum_Total_Train);
% Snapshot trick: eigen-decompose the small N x N Gram matrix instead of the
% huge D x D pixel covariance, then map eigenvectors back to image space.
M_Cov = M_Center_Train' * M_Center_Train;
[vector, value] = eig(M_Cov);
Value = diag(value);
[Value, Index] = sort(Value);        % ascending: largest eigenvalues last
Axes_pca = 60;                       % number of principal components kept
EigenVector = zeros(iDim_Image, Axes_pca);
for i = Axes_pca : -1 : 1
    j = iNum_Total_Train + 1 - i;    % index from the top of the sorted list
    EigenVector(:, i) = (1/sqrt(Value(j))) .* M_Center_Train * vector(:, Index(j));
    EigenVector(:, i) = EigenVector(:, i) / norm(EigenVector(:, i));
end
% NOTE(review): the raw (uncentered) samples are projected here; subtracting
% M_Mean_Train before projecting would be the conventional choice — confirm.
M_Pca_Train = EigenVector' * M_Sample_Train;
M_Pca_Test = EigenVector' * M_Sample_Test;
fprintf('********** Step3 contruct coefficient **********\n');
% W(:, i) holds the linear-reconstruction coefficients of training sample i
% in terms of all OTHER training samples (self-coefficient forced to 0).
%
% BUG FIX(review): in the original code the l1_ls call and the W-filling
% loop sat OUTSIDE the per-sample loop, so every column of W was filled from
% the single solution computed for the LAST sample only — and the coding
% target was M_Sample_Test(:, i), although W indexes training samples.  A
% per-sample ridge solution inv(D'*D + 0.01*I)*D'*x was also computed and
% immediately discarded.  The sparse coding is moved inside the loop and the
% training sample itself is used as the target.
W = zeros(iNum_Total_Train, iNum_Total_Train);
for i = 1 : iNum_Total_Train
    xi = M_Sample_Train(:, i);
    dictionary = M_Sample_Train;
    dictionary(:, i) = [];           % leave-one-out dictionary
    % l1-regularized least squares coding of xi over the other samples.
    [solution, bb, cc] = l1_ls(dictionary, xi, 0.01);
    % Re-insert the N-1 coefficients with a zero at the diagonal position.
    W(1:i-1, i) = solution(1:i-1);
    W(i, i) = 0;
    W(i+1:iNum_Total_Train, i) = solution(i:iNum_Total_Train-1);
end
iDim_lda = Axes_pca;
fprintf('************step 4 get matrix****************\n');
Kw = 1;                              % number of within-class nearest neighbours
% Within-class scatter Sw, built in PCA space: for each training sample x,
% take its Kw within-class neighbours, form a coefficient-weighted mean m,
% and accumulate the outer product of the projected difference (x - m).
Sw = zeros(iDim_lda, iDim_lda);
for c = 1 : iClass
    % Column indices of class c (invariant over j, hoisted out of the loop).
    ci = iNum_Train_PerSample*(c-1)+1 : iNum_Train_PerSample*c;
    Classi = M_Sample_Train(:, ci);
    for j = 1 : iNum_Train_PerSample
        index = j + (c - 1) * iNum_Train_PerSample;
        x = M_Sample_Train(:, index);
        coefficienti = W(ci, index);  % within-class reconstruction coefficients
        KNw = WithinclassNeighbors(M_Sample_Train, iNum_Train_PerSample, x, ci, j, Kw);
        s = coefficienti(KNw(1:Kw));  % coefficients of the Kw neighbours
        % Coefficient-weighted combination of the neighbours.
        % NOTE(review): weights are divided by norm(s) here but by sum(w) in
        % the between-class loop below — confirm the asymmetry is intended.
        m = zeros(iDim_Image, 1);
        for k1 = 1 : Kw
            m = m + Classi(:, KNw(k1)) * (coefficienti(KNw(k1)) / norm(s));
        end
        d = EigenVector' * (x - m);   % difference projected into PCA space
        Sw = Sw + d * d';
    end
end
Kb = 2;                              % number of marginal (boundary) points per class pair
% Between-class scatter Sb: for each training sample x of class i, build, for
% every later class m, the coefficient-weighted combination b of the Kb
% marginal points of class m nearest to x; keep the difference x - b with the
% smallest residual and accumulate its projected outer product.
Sb = zeros(iDim_lda, iDim_lda);
for i = 1 : iClass - 1
    for j = 1 : iNum_Train_PerSample
        index = j + (i - 1) * iNum_Train_PerSample;
        x = M_Sample_Train(:, index);
        Coefficient = W(:, index);
        bias = [];                   % candidate differences, one column per class m
        wucha = [];                  % residual (error) of each candidate
        for m = i + 1 : iClass
            % Column indices of class m.
            cm = iNum_Train_PerSample*(m-1)+1 : iNum_Train_PerSample*m;
            Classm = M_Sample_Train(:, cm);
            KNb = MaginalpointsBetweenclasses(Classm, x, Kb, iNum_Train_PerSample);
            Coefficienti = Coefficient(cm);
            w = zeros(1, Kb);
            for k1 = 1 : Kb
                w(k1) = Coefficienti(KNb(k1));
            end
            b = zeros(iDim_Image, 1);
            for k1 = 1 : Kb
                b = b + Classm(:, KNb(k1)) * (Coefficienti(KNb(k1)) / sum(w));
            end
            % BUG FIX(review): the original used sum(x - b), a signed sum
            % that is not a distance; the Euclidean norm is the residual.
            residual = norm(x - b);
            bias = [bias, x - b];
            wucha = [wucha, residual];
        end
        % Select the candidate with the smallest residual (moved out of the
        % m-loop: the final answer only needs the complete wucha vector).
        [val, idx] = min(wucha);
        % BUG FIX(review): the original indexed bias(idx) — a single scalar
        % element of the matrix — instead of the full column bias(:, idx).
        X = bias(:, idx);
        Sb = Sb + (EigenVector' * X) * (EigenVector' * X)';
    end
end
% Discriminant directions: eigenvectors of inv(Sw)*Sb, i.e. directions that
% maximize between-class over within-class scatter.
R = Sw \ Sb;                   % backslash avoids forming the explicit inverse
[vector, value] = eig(R);
Value = diag(value);
% NOTE(review): R is not symmetric, so eig may return complex pairs; sorting
% complex values orders by magnitude — confirm real eigenvalues are expected.
[Value, Index] = sort(Value);  % ascending: dominant directions are at the end
k = 1;
Axes = [];
Rate_Nearest = [];
Rate_Min_Dist = [];
Rate_Cosine_Distance = [];
% Sweep the number of discriminant axes and record the recognition rate.
for temp = 10 : 5 : iDim_lda
    EigVector = zeros(iDim_lda, temp);
    for i = temp : -1 : 1
        EigVector(:, i) = vector(:, Index(iDim_lda + 1 - i));
        EigVector(:, i) = EigVector(:, i) / norm(EigVector(:, i));
    end
    M_Feature_Train = EigVector' * M_Pca_Train;
    M_Feature_Test = EigVector' * M_Pca_Test;
    % WrongSample_Min = Min_Distance_Classifier(M_Feature_Train, M_Feature_Test, iNum_Train_PerSample, iNum_PerSample, iClass);
    % Rate_Min = (iNum_Test_PerSample * iClass - WrongSample_Min) / (iNum_Test_PerSample * iClass);
    WrongSample_Near = Nearest_Classifier(M_Feature_Train, M_Feature_Test, iNum_Train_PerSample, iNum_PerSample, iClass);
    Rate_NN = (iNum_Test_PerSample * iClass - WrongSample_Near) / (iNum_Test_PerSample * iClass);
    Axes(k) = temp;
    Rate_Nearest(k) = Rate_NN;
    % Rate_Min_Dist(k) = Rate_Min;
    k = k + 1;
end
Rate_Nearest                    % display accuracy for each axis count
fprintf('\n');
hold on;
plot(Axes, Rate_Nearest, 'b-d');
没有合适的资源?快使用搜索试试~ 我知道了~
MLRE.rar_K-BEST MATLAB_K._K近邻算法
共1个文件
m:1个
1.该资源内容由用户上传,如若侵权请联系客服进行举报
2.虚拟产品一经售出概不退款(资源遇到问题,请及时私信上传者)
版权申诉
0 下载量 73 浏览量
2022-09-19
19:14:41
上传
评论
收藏 2KB RAR 举报
温馨提示
MLRE算法,选取最优K近邻,使识别率达到更高
资源详情
资源评论
资源推荐
收起资源包目录
MLRE.rar (1个子文件)
MLRE.m 6KB
共 1 条
- 1
御道御小黑
- 粉丝: 61
- 资源: 1万+
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功
评论0