function [Training_Accuracy,Testing_Accuracy]=elm_subnets(X,Y,Xtest,Ytest,number_neurons,number_subnets,neurons_subnets,ELM_Type)
% elm_subnets: trains a Single hidden Layer Feedforward Network (SLFN) with a subnets-based ELM;
% X: training inputs; matrix of N instances by Q attributes;
% Y: training targets; column vector of N rows with the target values;
% Xtest: testing inputs;
% Ytest: testing targets;
% number_neurons: number of neurons in the hidden layer (user decision);
% number_subnets: number of subnetworks in the hidden layer;
% neurons_subnets: number of neurons in each subnetwork of the hidden layer;
% ELM_Type: 1 for classification; any other value for regression;
% Training_Accuracy: RMSE for regression, classification rate for classification;
% A: a vector containing the indexes of the subnets in the hidden layer;
% Note: for classification, specify the classes with integers; each instance
% has exactly one class (one target per instance).
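%
% Usage sketch (illustrative only; the hyper-parameter values below are an
% arbitrary example, not a recommendation):
%   [tr_acc,ts_acc]=elm_subnets(X,Y,Xtest,Ytest,100,10,5,1);  % classification
%   [tr_rmse,ts_rmse]=elm_subnets(X,Y,Xtest,Ytest,100,10,5,0);% regression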
%%%%%%%%%%%%%%
%%%training%%%
%%%%%%%%%%%%%%
X=scaledata(X,0,1);% data normalization
Xtest=scaledata(Xtest,0,1);% data normalization
alpha=size(X);% dimensions of the training set: alpha(1)=N instances, alpha(2)=Q attributes
%1) First step: generate random input weights
input_weights=rand(number_neurons,alpha(2))*2-1;% random input weights in [-1,1]
%2) Second step: calculate the H matrix
H=input_weights*X';% linear projection of the inputs
H=radbas(H);% activation function (you can change the activation function here)
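% H is a number_neurons x N matrix; column j holds the hidden activations of
% instance j. Note that radbas(x)=exp(-x.^2), so every entry of H lies in (0,1].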
%3) Third step: subnets mapping
if number_subnets<=number_neurons
A=randi([1,number_neurons],1,number_subnets);% randomly choose the positions of the subnets in the hidden layer (positions may repeat; use randperm for unique positions)
else
error('number_subnets must not exceed number_neurons');
end
SUB_inputweights=rand(neurons_subnets,number_subnets)*2-1;% generate input weights for the subnets;
SUB_outputweights=rand(number_subnets,neurons_subnets)*2-1;% generate output weights for the subnets;
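% Dimension note: SUB_inputweights is neurons_subnets x number_subnets and
% SUB_outputweights is number_subnets x neurons_subnets, so the number_subnets
% selected hidden-neuron rows are projected through a random layer of
% neurons_subnets units and then mapped back to number_subnets rows.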
input_subnet=zeros(number_subnets,size(H,2));% preallocate
for i=1:number_subnets
input_subnet(i,:)=H(A(i),:); % load the outputs of the A(i)-th hidden neuron
end
H_subnet=SUB_inputweights*input_subnet;% temporary hidden layer of the subnets
H_subnet=radbas(H_subnet);% activated hidden layer of the subnets
O_subnet=(H_subnet' * SUB_outputweights')';% outputs of the subnets
O_subnet=radbas(O_subnet);% activated outputs of the subnets
for i=1:number_subnets
H(A(i),:)=O_subnet(i,:); % update H
end
%%%%end subnets mapping
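%%%% At this point the rows of H indexed by A hold the subnet outputs instead
%%%% of the original hidden-neuron activations; the remaining rows are unchanged.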
%4) Fourth step: calculate the output weights Beta
B=pinv(H') * Y ;% Moore-Penrose pseudoinverse solution
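% B is the minimum-norm least-squares solution of H'*B = Y, i.e. it minimizes
% ||H'*B - Y||; H'\Y would give an alternative least-squares solve (identical
% when H' has full column rank).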
%%%%%%%%%%%%%%% calculate the training performance %%%%
if ELM_Type==1
tr_output=round(H' * B)' ;% calculate the training output
c_rate=zeros(1,length(tr_output));
for tb2=1: length(tr_output)
if tr_output(tb2)==Y(tb2)
c_rate(tb2)=1;
end
end
Training_Accuracy=sum(c_rate)*100/length(tr_output);%Classification rate
else
tr_output=(H' * B)' ;% calculate the training output
Training_Accuracy=sqrt(mean((Y'-tr_output).^2));% RMSE
end
%%%%%%%%%%%%%%%%
%%%testing%%%%%%
%%%%%%%%%%%%%%%%
H_test=input_weights*Xtest';% calculate the H matrix for the testing set
H_test=radbas(H_test);% activation function (you can change the activation function here)
%%% subnets mapping
ts_input_subnet=zeros(number_subnets,size(H_test,2));% preallocate
for i=1:number_subnets
ts_input_subnet(i,:)=H_test(A(i),:); % load the outputs of the A(i)-th hidden neuron
end
ts_H_subnet=SUB_inputweights* ts_input_subnet;% temporary hidden layer of the subnets
ts_H_subnet=radbas(ts_H_subnet);% activated hidden layer of the subnets
ts_O_subnet=(ts_H_subnet' * SUB_outputweights')';% outputs of the subnets
ts_O_subnet=radbas(ts_O_subnet);% activated outputs of the subnets
for i=1:number_subnets
H_test(A(i),:)=ts_O_subnet(i,:); % update H_test
end
%%% end of subnets mapping
%%%%%%%%%%%%%%%calculate the testing output %%%%
if ELM_Type==1
ts_output=round(H_test' * B)' ;% calculate the testing output (rounded to integer class labels)
else
ts_output=(H_test' * B)' ;% calculate the testing output
end
%%%%%%%%%%%%%%%% calculate the testing performance %%%
if ELM_Type==1
ts_c_rate=zeros(1,length(ts_output));
for tb2=1: length(ts_output)
if ts_output(tb2)==Ytest(tb2)
ts_c_rate(tb2)=1;
end
end
Testing_Accuracy=sum(ts_c_rate)*100/length(ts_output);% classification rate
else
Testing_Accuracy=sqrt(mean((Ytest'-ts_output).^2));% RMSE
end
end