% BP neural-network regression demo: train on two-feature input vectors to
% predict Nd element component content (%), then evaluate on held-out data.
clc
% FIX: use clearvars instead of "clear all" — "clear all" additionally purges
% loaded functions, MEX files and breakpoints, slowing every re-run of the
% script with no benefit; clearvars resets only workspace variables.
clearvars
% Training inputs: 2x78 matrix; row 1 and row 2 are the two input features
% (presumably the h and s quantities used in the disabled single-point
% lookup example at the end of the script — TODO confirm with the author).
input_train=[0.1396 0.0912 0.0858 0.0319 0.0778 0.1755 0.0956 0.0912 0.0628 0.0769 0.1767 0.0492 0.1786 0.0427 0.1802 0.0698 0.0568 0.0156 0.0450 0.0893 0.1095 0.0967 0.0521 0.0174 0.0806 0.0186 0.0788 0.1166 0.0904 0.0760 0.0701 0.1791 0.1801 0.1058 0.0842 0.1209 0.0815 0.0126 0.0887 0.1728 0.0354 0.1015 0.0918 0.0931 0.1627 0.0978 0.1303 0.0833 0.0954 0.0881 0.0250 0.0258 0.0947 0.0956 0.1782 0.0866 0.1658 0.0949 0.0988 0.0937 0.1127 0.1571 0.0899 0.1009 0.1740 0.0650 0.0908 0.1353 0.0586 0.1539 0.0942 0.1037 0.0927 0.1705 0.0135 0.0939 0.1470 0.1250
0.6088 0.5789 0.5673 0.2950 0.5333 0.5962 0.5860 0.5810 0.4676 0.5291 0.5782 0.3855 0.5962 0.3444 0.5967 0.5092 0.4274 0.2272 0.3624 0.5779 0.6005 0.5820 0.4053 0.2313 0.5457 0.2440 0.5375 0.6044 0.5799 0.5252 0.4942 0.5953 0.5928 0.5995 0.5609 0.6159 0.5497 0.2150 0.5764 0.5993 0.3105 0.5979 0.5856 0.5832 0.6067 0.5814 0.6077 0.5573 0.5724 0.5746 0.2650 0.2704 0.5771 0.5854 0.5908 0.5700 0.5989 0.5792 0.5908 0.5800 0.6041 0.5930 0.5791 0.5912 0.5981 0.4833 0.5806 0.6028 0.4401 0.5998 0.5726 0.5937 0.5772 0.6025 0.2202 0.5849 0.6020 0.6053];
% Training targets: 1x78 vector of Nd component content (%), range ~0-100.
output_train=[5.4000 65.2000 74.3000 98.1000 83.3000 0.4000 44.6000 65.3000 92.9000 84.3000 0.0500 96.6000 0.3000 97.5000 0.2000 90.5000 94.9000 99.1000 97.3000 69.3000 24.3000 42.0000 96.2000 98.8000 80.3000 98.7000 82.3000 17.8000 67.3000 85.0000 89.0000 0.2500 0.1500 28.0000 76.3000 14.0000 79.3000 99.1500 70.3000 0.7000 98.0000 35.4000 63.5000 55.1000 1.6000 41.7000 9.0000 77.3000 44.8000 71.3000 98.6000 98.5000 46.5000 44.5000 0.1000 73.3000 1.2000 47.4000 39.9000 52.4000 20.6000 2.5000 68.3000 36.8000 0.5000 92.6000 66.3000 7.0000 94.3000 3.0000 48.3000 31.7000 59.9000 0.9000 99.4000 50.2000 4.0000 12.2000];
% Test inputs: 2x15 matrix, same two feature rows as input_train.
input_test=[ 0.0282 0.0824 0.0741 0.0797 0.0467 0.0874 0.0850 0.0401 0.0772 0.0540 0.0124 0.0668 0.0160 0.0215 0.0605
0.2812 0.5536 0.5184 0.5416 0.3703 0.5725 0.5642 0.3308 0.5305 0.4148 0.2116 0.4915 0.2292 0.2517 0.4519];
% Test targets: 1x15 vector of measured Nd component content (%).
output_test=[98.3000 78.3000 87.2000 81.3000 97.0000 72.3000 75.3000 97.8000 84.1000 95.7000 99.3000 91.3000 98.9000 98.6500 93.2000];
% Normalize inputs and targets into [0.1, 0.9]; the *ps structs record the
% mapping so it can be re-applied ('apply') and inverted ('reverse') below.
[inputn,inputps]=mapminmax(input_train,0.1,0.9);
[outputn,outputps]=mapminmax(output_train,0.1,0.9);
% BP neural network construction
% Build a BP network with a single hidden layer of 7 neurons and train it
% on the sample vectors. NOTE(review): newff is deprecated in newer Neural
% Network Toolbox releases in favor of feedforwardnet — confirm toolbox
% version before modernizing.
net=newff(inputn,outputn,7);
% Training parameter configuration (epochs, learning rate, goal)
net.trainParam.show = 50; %progress display interval (iterations)
net.trainParam.lr = 0.01; %learning rate
net.trainParam.mc = 0.6; %momentum factor
net.trainParam.epochs = 1000; %maximum number of training epochs
net.trainParam.goal = 3e-5;%performance goal: training stops once the error falls below this value
net.trainParam.max_fail=15;%maximum validation failures before early stopping
% Train the BP network
net=train(net,inputn,outputn);
% Normalize the test inputs with the training-set mapping
inputn_test=mapminmax('apply',input_test,inputps);
% Simulate the trained network on the normalized test inputs
an=sim(net,inputn_test);
% De-normalize the predictions back to the original target scale
BPoutput=mapminmax('reverse',an,outputps);
% Absolute and relative prediction errors.
% NOTE(review): the variable name "error" shadows MATLAB's built-in error()
% function in this workspace; consider renaming (e.g. abs_err) in a wider
% refactor that also updates the plotting section.
error=BPoutput-output_test;%absolute prediction error
rerror=100*(BPoutput-output_test)./output_test;%relative prediction error (%)
% save('E:\Project Pr-Nd\Program\net.mat','net');
%load('C:\Users\zz\Desktop\net1.mat');
% Total sample count (m) and training sample count (n_train);
% m - n_train = 15 is the number of test samples shown in both subplots.
m=93;
n_train=78;
% --- Subplot 1: predicted vs. measured output ---
figure(1)
subplot(2,2,1)
plot(output_test,':*')
hold on
plot(BPoutput,':ro');
% BUG FIX: the original plot(0:0.5:(m-n_train),70,': k') pairs a 31-element
% x-vector with a scalar y, which raises "Vectors must be the same length".
% Draw the 70% reference line from its two endpoints instead. Plotting it
% before legend() also keeps auto-updating legends from adding a spurious
% "data3" entry; legend labels only the first two line objects.
plot([0 m-n_train],[70 70],':k')
legend('化验值','预测值')
title('BP神经网络预测输出对比图')
xlabel('验证样本个数')
ylabel('Nd元素组分含量(%)')
% h1=legend('desired value','forecast value');
% set(h1,'fontsize',7)
% title('BP neural network forecast output','fontsize',7)
% xlabel('number of test samples','fontsize',7)
% ylabel('element component content of Nd(%)','fontsize',7)
axis([0,15,0,100])
hold off
% --- Subplot 2: absolute and relative prediction errors ---
subplot(2,2,2)
plot(1:(m-n_train),error,':*')
hold on
plot(1:(m-n_train),rerror,':ro')
% Zero-error reference line (same vector-vs-scalar fix as above).
plot([0 m-n_train],[0 0],':k')
% h2=legend('absolute error','relative error');
% set(h2,'fontsize',7)
% title('BP neural network forecast error','fontsize',7)
% xlabel('number of test samples','fontsize',7)
% ylabel('output error value(%)','fontsize',7)
legend('绝对误差','相对误差');
title('BP神经网络预测误差比较')
xlabel('验证样本个数')
ylabel('输出误差值(%)')
axis([0,15,-3.1,3.1])
hold off
% Disabled single-point lookup example: feed one (h,s) pair through the
% trained net, undo the [0.1,0.9] normalization, and derive the Pr content
% as the complement of Nd.
%h=0.05;s=0.1767;
%hh=h;ss=s;
% input=[h;s];
% content_map=sim(net,input);
% nd=(content_map-0.1)/0.8; %de-normalization
% pr=100-nd;