function psobp
% PSOBP: BP neural network trained by PSO algorithm
% Copyright by Deng Da-Peng @ 2005
% Email: rexdeng@163.com
% You can change and distribute this code freely for academic usage
% Business usage is strictly prohibited
clc       % clear the command window
clear all % NOTE(review): inside a function this mainly resets globals/persistents; later 'global' re-declarations depend on it
% ---- Load and pre-process the data set --------------------------------
AllSamIn=...;  % Add your all input data
AllSamOut=...; % Add your all output data (fixed: original read 'AllSamOut-...', a typo for '=')
% Pre-processing data with premnmx, you can use other functions
global minAllSamOut;
global maxAllSamOut;
[AllSamInn,minAllSamIn,maxAllSamIn,AllSamOutn,minAllSamOut,maxAllSamOut] = premnmx(AllSamIn,AllSamOut);
% Draw 10 percent from all samples as testing samples, the rest as training samples
i=[10:10:1000]; % indices 10,20,...,1000 -> 1x100 vector of test-sample columns
% Vectorized column selection; identical result to the original loop that
% concatenated AllSamInn(:,i(j)) one column at a time (which grew the array
% inside the loop, an accidental O(n^2) pattern).
TestSamIn=AllSamInn(:,i);
TestSamOut=AllSamOutn(:,i);
TargetOfTestSam=...; % add real output of testing samples
TrainSamIn=AllSamInn;
TrainSamOut=AllSamOutn;
TrainSamIn(:,i)=[];  % delete the test columns to obtain the training set
TrainSamOut(:,i)=[];
% Evaluating sample
EvaSamIn=...; % Add your evaluation input data (fixed: original lacked ';' and comment)
EvaSamInn=tramnmx(EvaSamIn,minAllSamIn,maxAllSamIn); % apply the same normalization as the training data
% Expose the training set through globals so the PSO fitness function
% (defined elsewhere) can read it.
global Ptrain Ttrain;
Ptrain = TrainSamIn;
Ttrain = TrainSamOut;
% The testing set stays local to this function.
Ptest  = TestSamIn;
Ttest  = TestSamOut;
% ---- BP network topology (shared with the fitness function via globals) ----
global indim hiddennum outdim;
indim     = 5; % number of input units
hiddennum = 3; % number of hidden-layer units
outdim    = 1; % number of output units
% ---- PSO hyper-parameters ----
vmax   = 0.5;   % maximum particle velocity
minerr = 0.001; % minimum error (presumably a stopping threshold - confirm where it is used below)
wmax   = 0.90;  % upper bound of the inertia weight
wmin   = 0.30;  % lower bound of the inertia weight
global itmax;   % maximum iteration count, shared via global
itmax  = 300;
c1 = 2;         % cognitive acceleration coefficient
c2 = 2;         % social acceleration coefficient
for iter=1:itmax
W(iter)=wmax-((wmax-wmin)/itmax)*iter; % weight declining linearly 权重线性下降