function [errorHistory,mu,E,Output,i]=BP_test(sample,label,test,j,mu)
% BP_TEST  Train a 3-layer BP network (logsig hidden layer followed by two
% linear layers) with a Levenberg-Marquardt-style damped update, then
% evaluate it on a test set.
%
% Inputs:
%   sample - training spectra, inputDim x numSamples (e.g. 1024 x 245;
%            rows = spectral bands, columns = samples)
%   label  - training labels, numSamples x 1
%   test   - test spectra, inputDim x numTestSamples (any number of columns)
%   j      - number of neurons in the first hidden layer
%   mu     - initial damping coefficient (adapted each epoch)
%
% Outputs:
%   errorHistory - loss values recorded on improving epochs
%   mu           - final damping coefficient
%   E            - final training loss (sum of squared errors)
%   Output       - de-normalized network output on the test set (1 x numTestSamples)
%   i            - number of epochs actually run
A=sample;
B=test;
C=label;
[M,N]=size(A);
[M1,N1]=size(B); %#ok<ASGLU> % M1 unused; kept for symmetry with size(A)
numberOfSample = N;          % number of training samples
numberOfSample1 = N1;        % number of test samples
numberOfHiddenNeure = j;     % neurons in the first hidden layer
numberOfHiddenNeure1=1;      % neurons in the second hidden layer
inputDimension = M;          % input dimensionality
outputDimension = 1;         % output dimensionality
C1=C;                        % keep raw labels for de-normalization later
% Normalize features and labels to [-1,1] row-wise via mapminmax
[A,ps1]=mapminmax(A'); %#ok<ASGLU>
A=A';
[B,ps2]=mapminmax(B'); %#ok<ASGLU>
B=B';
[C,ps3]=mapminmax(C'); %#ok<ASGLU>
C=C';
% Weight initialization: uniform in [-1,1] scaled by 1/sqrt(fan-in).
% NOTE(review): the original also drew random biases here, but those
% draws were dead code - immediately overwritten by the zeros below -
% so they have been removed.
W1 = (2 * rand(numberOfHiddenNeure, inputDimension) - 1)/sqrt(inputDimension);
W2 = (2 * rand(numberOfHiddenNeure1, numberOfHiddenNeure) - 1)/sqrt(numberOfHiddenNeure);
W3 = (2 * rand(outputDimension, numberOfHiddenNeure1) - 1)/sqrt(numberOfHiddenNeure1);
% Biases start at zero
B1=zeros(numberOfHiddenNeure, 1);
B2=zeros(numberOfHiddenNeure1, 1);
B3=zeros(outputDimension, 1);
errorHistory = [];
maxEpochs=15000;
T=1:length(C);               % x-axis for the live monitoring plot
E=0;
for i = 1:maxEpochs
    % Forward pass: logsig hidden layer, then two linear layers
    hiddenOutput = logsig(W1 * A + repmat(B1, 1, numberOfSample));
    hiddenOutput2 = W2 * hiddenOutput + repmat(B2, 1, numberOfSample);
    networkOutput = W3 * hiddenOutput2 + repmat(B3, 1, numberOfSample);
    % Residual between targets and prediction (renamed from "error",
    % which shadowed MATLAB's built-in error())
    err = C' - networkOutput;
    temp=E;                  % previous loss, used to adapt mu below
    E = sum(err(:).^2);      % sum-of-squares loss (same value as sumsqr);
                             % semicolon added to stop per-epoch console spam
    % Early-stop threshold
    if E<0.5
        break
    end
    % Adapt the damping coefficient: shrink on improvement, grow otherwise.
    % NOTE(review): errorHistory records only improving epochs - this
    % preserves the original behavior.
    if E<=temp
        errorHistory = [errorHistory E]; %#ok<AGROW>
        mu=mu/1.0001;
    else
        mu=mu*1.0001;
    end
    % Backpropagated deltas (logsig derivative is h.*(1-h))
    delta3 = err;                                                % 1 x N
    delta2 = W3' * delta3;                                       % 1 x N
    delta1 = W2' * delta2 .* (hiddenOutput.*(1 - hiddenOutput)); % j x N
    % Jacobian-like terms for the damped (LM-style) updates
    J3=hiddenOutput2;                                            % 1 x N
    J2=W3*hiddenOutput;                                          % j x N (W3 is scalar here)
    % BUGFIX: was ones(245,1) - a hard-coded sample count that crashed for
    % any other N - and used the tanh derivative (1 - h.^2) although the
    % hidden layer is logsig; now uses numberOfSample and the logsig
    % derivative h.*(1-h), consistent with delta1 above.
    J1=ones(numberOfSample,1)*W2*W3*(hiddenOutput.*(1 - hiddenOutput))*A'; % N x M
    % Damped weight updates and plain bias gradients
    dW3 = (J3*J3'+mu)^-1*delta3*hiddenOutput2';                  % 1 x 1
    dB3 = delta3 * ones(numberOfSample, 1);                      % 1 x 1
    dW2 = ((J2*J2'+mu*eye(numberOfHiddenNeure))\(delta2*hiddenOutput')')'; % 1 x j
    dB2 = delta2 * ones(numberOfSample, 1);                      % 1 x 1
    dW1 = ((J1'*J1+mu*eye(M))\(delta1*A')')';                    % j x M
    dB1 = delta1 * ones(numberOfSample, 1);                      % j x 1
    % Apply updates, scaled by the input dimension
    W3 = W3 + dW3/M;
    B3 = B3 + dB3/M;
    W2 = W2 + dW2/M;
    B2 = B2 + dB2/M;
    W1 = W1 + dW1/M;
    B1 = B1 + dB1/M;
    % Live monitoring: network output vs. normalized targets
    plot(T,[networkOutput;C']');
    drawnow;
end
% Evaluate on the test set with the trained weights and biases
testHiddenOutput = logsig(W1*B + repmat(B1, 1, numberOfSample1));
testHiddenOutput1 = W2 * testHiddenOutput + repmat(B2, 1, numberOfSample1);
Output = W3 * testHiddenOutput1 + repmat(B3, 1, numberOfSample1);
% De-normalize: rescale the network output using the mapminmax settings
% learned from the raw (un-normalized) training labels
[C1,PS]=mapminmax(C1'); %#ok<ASGLU>
[output,PS1]=mapminmax(Output); %#ok<ASGLU>
Output = mapminmax('reverse',output,PS);