% Linear-network classification demo: define the training samples and targets.
% NOTE: "clearvars" replaces "clear all" — "clear all" also wipes breakpoints
% and cached functions, which is rarely intended in a demo script.
close all, clearvars, clc, format compact
% define classes: 12 two-dimensional input samples (column vectors)
p1=[342 2322]';
p2=[588 1952]';
p3=[652 997]';
p4=[378 997]';
p5=[437 2761]';
p6=[669 2349]';
p7=[781 1136]';
p8=[459 1105]';
p9=[452 3081]';
p10=[717 2501]';
p11=[803 1210]';
p12=[494 1345]';
% All samples as columns of one 2x12 input matrix.
P=[p1 p2 p3 p4 p5 p6 p7 p8 p9 p10 p11 p12];
% Group the samples by class (3 samples per class, 4 classes).
col1 = [p1 p5 p9];
col5 = [p2 p6 p10];
col7 = [p3 p7 p11];
col10= [p4 p8 p12];
% define output coding for classes: a 2-bit (+1/-1) code per class
T1 = [1 -1]';
T2 = [1 1]';
T3 = [-1 1]';
T4 = [-1 -1]';
% Targets follow the sample order in P: the 4-class code pattern repeats
% 3 times (was hand-unrolled; repmat is less error-prone).
T = repmat([T1 T2 T3 T4], 1, 3);
% Plot the input samples, one marker style per class; hold the axes so the
% decision boundary drawn later by plotpc overlays this scatter.
hold on
plot(col1(1,:),  col1(2,:),  'bs', ...
     col5(1,:),  col5(2,:),  'r+', ...
     col7(1,:),  col7(2,:),  'go', ...
     col10(1,:), col10(2,:), 'm*')
% Learning rate: maximum stable rate for a linear layer with a bias input.
lr = maxlinlr(P,'bias');
% Create a zero-delay linear network using that learning rate.
net = linearlayer(0,lr);
% Size the network's weights and biases to match the input/target dimensions.
net = configure(net,P,T);
% Start training from zero input weights and a fixed initial bias vector.
net.IW{1,1} = zeros(2,2);
net.b{1} = [-2;-3];
% Train until the error goal (0) is met or the epoch limit is reached.
net.trainParam.goal= 0;
net.trainParam.epochs = 3000;
[net, tr] = train(net,P,T);
% Overlay the learned decision boundary on the sample plot.
figure(1)
plotpc(net.IW{1,1},net.b{1});
% Extract the trained weights and biases for inspection.
weights = net.IW{1,1};
biases = net.b{1};
% Switch the output to a symmetric hard limit so the network emits the
% discrete +/-1 class codes during classification.
net.layers{1,1}.transferFcn = 'hardlims';
% classify: run 10 unseen test points through the trained network
pt1=[400 2500]';
pt2=[600 1200]';
pt3=[900 1300]';
pt4=[700 2400]';
pt5=[500 1000]';
pt6=[400 2000]';
pt7=[800 1500]';
pt8=[700 2700]';
pt9=[300 900]';
pt10=[1000 1500]';
ptest = [pt1 pt2 pt3 pt4 pt5 pt6 pt7 pt8 pt9 pt10];
% Preallocate the output matrix (one 2-bit class code per column) instead of
% growing it inside the loop, and derive the loop bound from the data.
out = zeros(2, size(ptest,2));
for cnt = 1:size(ptest,2)
    cc = ptest(:,cnt)   % no semicolon: intentionally echoes the test point
    y = net(cc)         % no semicolon: intentionally echoes the class code
    out(:,cnt) = y;
end