% --- Data preparation: build lagged (autoregressive) inputs for a NARX-style model ---
load traindata2.mat          % expects series x (exogenous input) and y (output) — TODO confirm variable names/shapes in the .mat file
% N=length(x); % prepare input/output data (original note; N is hard-coded below instead)
N=10000;                     % number of samples used; assumes x and y each have at least N elements — verify
lag1=2; lag2=5; % autoregressive orders: lag1 past values of x, lag2 past values of y
lag=max(lag1,lag2);          % warm-up length; first usable target index is lag+1
[xn,xps]=mapminmax(x);       % normalize x to [-1,1]; xps stores the mapping for later 'reverse'
[yn,yps]=mapminmax(y);       % normalize y likewise; yps is used to de-normalize network output
yout=y(lag+1:N);             % UN-normalized targets, aligned with the lagged inputs built below
xn1=zeros(lag1,N-lag);       % row m will hold normalized x(t-m), t = lag+1..N
xn2=zeros(lag2,N-lag);       % row n will hold normalized y(t-n), t = lag+1..N
%% (alternative, commented out) lag construction for predicting x instead of y
% for m=1:lag1
% xn1(m,:)=xn(lag-m:N-m-1);
% end
% for n=1:lag2
% xn2(n,:)=yn(1+lag-n+1:N-n+1);
% end
%% lag construction for predicting y
for m=1:lag1
xn1(m,:)=xn(1+lag-m:N-m);    % window of length N-lag: x delayed by m steps
end
for n=1:lag2
xn2(n,:)=yn(1+lag-n:N-n);    % window of length N-lag: y delayed by n steps
end
%% assemble training matrices for solving y
inputn=[xn1;xn2 ];           % (lag1+lag2) x (N-lag) input matrix, one column per time step
outputn=yn(lag+1:N);         % normalized targets, 1 x (N-lag)
% --- Network creation and training ---
% Single hidden layer feed-forward net: newff(inputs, targets, hidden size, transfer fns).
hiddenSize = 20;
net = newff(inputn,outputn,hiddenSize,{'tansig'});

% Training hyper-parameters.
net.trainParam.epochs = 500;     % maximum number of training epochs
net.trainParam.lr     = 0.01;    % learning rate
net.trainParam.mc     = 0.9;     % momentum factor
net.trainParam.goal   = 1e-5;    % stop when MSE reaches this target
% (A mini-batch-style chunked training loop existed here but was commented out
%  in the original; full-batch training is used instead.)

tic
net = train(net,inputn,outputn); % train on the full lagged input/target matrices
toc

% Network response on the training inputs, mapped back to the original y scale.
an = sim(net,inputn);
BP_train = mapminmax('reverse',an,yps);
%% Result analysis
% Plot model output vs. expected output on the training set, then the residual.
dt=0.05;                      % sampling interval — NOTE(review): axis label says ms; confirm units
t1=dt:dt:(N-lag)*dt;          % time axis: one point per training sample (length N-lag)
figure(1)
plot(t1,BP_train,'r-')
hold on
plot(t1,yout,'b');
legend('模型输出','期望输出')
title('BP网络训练输出','FontName','Times New Roman','fontsize',14,'FontWeight','Bold')
ylabel('y','FontName','Times New Roman','fontsize',14,'FontWeight','Bold')
xlabel('Time(ms)','FontName','Times New Roman','fontsize',14,'FontWeight','Bold')
% Residual.
% Fix 1: do not name this variable "error" — that shadows MATLAB's built-in error().
% Fix 2: force both operands to row vectors; if y (hence yout) is a column vector,
% the original row-minus-column subtraction would implicitly expand into an
% (N-lag) x (N-lag) matrix and silently corrupt the plot and the MSE.
err_train=BP_train(:).'-yout(:).';
perf=mse(err_train);          % training-set mean squared error
figure(2)
plot(t1,err_train);
title('BP网络训练误差','FontName','Times New Roman','fontsize',14,'FontWeight','Bold')
ylabel('error','FontName','Times New Roman','fontsize',14,'FontWeight','Bold')
xlabel('Time(ms)','FontName','Times New Roman','fontsize',14,'FontWeight','Bold')
% (removed stray web-page artifact "评论0" — it was not MATLAB code and broke parsing)