%% Reset the workspace, figures, and command window
% NOTE: 'clear all' also wipes breakpoints, persistent variables, and the
% function cache; 'clearvars' removes only workspace variables and is the
% form recommended by the MATLAB Code Analyzer.
clearvars
close all
clc
%% Read the northeast dataset from the spreadsheet (sheet 'N9')
% NOTE(review): xlsread has been deprecated since R2019a in favor of
% readmatrix; it is kept here for compatibility with older releases.
procdata = xlsread('C:\Users\lxs\Desktop\ANN data analysis\201705voconorthANN\201705pH\ANNnorth.xlsx','N9');

%% Remove every row that contains at least one NaN (inputs or targets)
% Vectorized replacement for the original backwards column-by-column loop:
% any(...,2) flags rows holding a NaN in any column; all flagged rows are
% deleted in one operation, giving the same result with a single pass.
procdata( any(isnan(procdata), 2), : ) = [];
%% Arrange inputs and targets in the row-per-variable layout the toolbox expects
% Input variables: pressure, temperature, salinity, P, O, Si, NH4, NO2, NO3
inputs  = transpose(procdata(:, 1:9));
% Target variable: AT (total alkalinity)
targets = transpose(procdata(:, 10));
% outputs = data(:,8)';  % alternative target: pH
%% Create a two-layer feed-forward fitting network (one hidden layer)
% net = feedforwardnet(6); % the argument is the hidden-layer neuron count
nN  = 6;                   % number of neurons in the hidden layer
net = feedforwardnet(nN);

%% Input/output preprocessing functions
net.inputs{1}.processFcns  = {'removeconstantrows', 'mapminmax'}; % ,'fixunknowns','mapstd'};
net.outputs{2}.processFcns = {'removeconstantrows', 'mapminmax'}; % ,'fixunknowns','mapstd'};
%% Split the samples into training, validation, and test subsets
% For a list of all data division functions type: help nndivide
net.divideFcn  = 'dividerand';   % divide the data randomly
net.divideMode = 'sample';       % divide up every sample
net.divideParam.trainRatio = 0.70;
net.divideParam.valRatio   = 0.15;
net.divideParam.testRatio  = 0.15;
%% Hidden-layer transfer function
net.layers{1}.transferFcn = 'tansig';

%% Training algorithm
% For help on training function 'trainlm' type: help trainlm
% For a list of all training functions type: help nntrain
net.trainFcn = 'trainlm';   % Levenberg-Marquardt
% Alternatives previously tried (kept for reference):
%   'traingdx' - variable learning rate: quick but poor (44%)
%   'trainbr'  - Bayesian regularization: good (86%), but long training
%   'trainbfg' - BFGS quasi-Newton: good (86%) but many negative values, ~9 min
%   'traingdm' - gradient descent with momentum: very poor (14%), stuck in a local minimum
%   'trainscg' - scaled conjugate gradient: good (83%), few negatives, very long (19+ min)

%% Performance function
% For a list of all performance functions type: help nnperformance
net.performFcn = 'mse';   % mean squared error

%% Plot functions
% For a list of all plot functions type: help nnplot
net.plotFcns = {'plotperform', 'plottrainstate', 'ploterrhist', 'plotregression', 'plotwb'};

%% Display and stopping criteria
net.trainParam.showWindow      = true;   % show the training GUI
net.trainParam.showCommandLine = false;  % no command-line progress output
net.trainParam.epochs          = 1000;   % maximum number of training epochs
net.trainParam.max_fail        = 6;      % validation failures before early stop
net.trainParam.goal            = 0;      % performance goal
%% Train the network
tic
% 'ann' is the trained network; 'tr' is the training record (epochs, performance)
[ann, tr] = train(net, inputs, targets);
toc
% y = ann(inputs);
% plot(inputs,targets,'o',inputs,y,'*');
w1 = ann.iw{1,1}';   % input -> hidden layer weight matrix (transposed)
% theta1 = ann.b{1};
w2 = ann.lw{2,1};    % hidden -> output layer weight matrix
% theta2 = ann.b{2};
%% Test and evaluate the network
% outputs = ann( inputs );
% errors = gsubtract( targets, outputs );
% performance = perform( ann, targets, outputs );

%% Simulate the trained network and compute error statistics
outputs = sim(ann, inputs);   % substitute 'inputs2' here to score a new dataset
errors  = gsubtract(targets, outputs);
% performance = perform ( ann, targets, outputs );

% mean() replaces the original sum(...)/size(...,2): identical for the row
% vectors used here, and robust to vector orientation.
MAE  = mean(abs(targets - outputs));
RMSE = sqrt(mean((targets - outputs).^2));
% Coefficient of determination (fraction of target variance explained)
R2   = 1 - sum((targets - outputs).^2) / sum((targets - mean(targets)).^2);

figure;
plotregression(targets, outputs, 'Train regression');
% axis([2075,2265,2075,2265])
%% Per-subset (training / validation / test) performance
% NOTE(review): the tr masks appear to blank out samples that do not belong
% to each subset, so perform() scores only that subset — confirm against
% the dividerand/train documentation.
trainTargets = targets .* tr.trainMask{1};
valTargets   = targets .* tr.valMask{1};
testTargets  = targets .* tr.testMask{1};
trainPerformance = perform(ann, trainTargets, outputs);
valPerformance   = perform(ann, valTargets,   outputs);
testPerformance  = perform(ann, testTargets,  outputs);
%% Inspect the network parameters
% view(nett);

%% Save the trained network, named by today's date and the hidden-layer size
OutFile = sprintf('%s_NET%d.mat', datestr(floor(now), 'ddmmyy'), nN);
save(OutFile, 'ann', 'tr', 'w1', 'w2');