% Training data: 120 samples x 7 columns.
% Columns 1-6 are used below as the predictor block X_trn; column 7 is the
% response Y_trn. NOTE(review): the physical meaning/units of each column
% are not documented in this file -- confirm against the data source.
train=[2098.8 2.66 7.43 628 20.5 2.9 67.8
2215 2.45 7.32 638 20.6 2.42 70.1
2494.8 2.53 6.87 582 20.3 3.61 80.5
2983.2 2.21 7.07 551 19.8 3.68 89
2056.4 2.65 6.94 566 19.2 2.5 77.8
2017.6 2.47 7.21 605 18.9 3.7 78.7
2432 2.56 6.65 552 20.7 2.8 70.3
1945.6 2.58 6.88 860 20.8 4.1 84.2
1966.8 2.58 7.65 840 20.8 3.45 59.4
1980 2.56 6.98 808 20.9 2.39 82.9
2164.8 2.57 7.63 721 20.5 2.66 76
2639.5 2.64 6.39 780 19.5 2.87 89.2
1842.4 2.39 6.83 843 18.6 2.25 59.7
1779.2 2.66 7.28 831 18.4 2.48 52.4
2344 2.87 6.76 743 17.9 2.69 82
2108 2.25 6.95 818 17.8 2.84 77.8
1896 2.48 7.03 884 18.3 1.88 70.6
2280 2.69 7.16 735 20.4 3.02 74.4
2011.2 2.84 6.71 805 20.7 2.56 77.1
2117.6 2.48 7.26 777 20.8 2.21 78.3
2100.8 2.65 7.31 555 20.9 3.87 79.9
2497.6 2.85 7.29 611 21.1 3.8 93.1
2042.8 2.34 6.55 569 21.1 2.18 70.3
2003.2 2.29 7.28 625 21.2 2.05 71
2438.8 2.89 6.1 438 21.2 2.05 87
2042.8 2.31 7.81 844 21.3 2.34 73.6
2164.8 3.05 7.05 428 21.3 2.29 75.8
1937.2 2.65 7.18 816 21.4 2.89 63
2095.6 2.72 6.92 768 21.6 2.31 65.3
1797 2.36 6.62 826 21.7 4.05 53.1
1862.4 2.54 7.04 813 21.8 3.65 62.2
2424.2 2.78 7.28 827 21.9 2.12 88.2
2411.2 2.96 7.36 705 22 2.36 84.1
2211.2 2.88 6.35 723 22.1 2.54 85
2123.7 2.61 7.27 825 22.3 2.78 77
1797 2.38 6.98 844 22.2 2.96 69.9
1953 2.67 6.85 524 22.1 2.88 71.3
1980 2.87 7.02 785 22.4 3.06 71.4
1966 2.57 7.09 884 22.5 3.15 71.6
1970 2.66 7.39 946 22.8 3.21 74.7
1927.7 2.39 6.94 604 22.9 3.01 69.4
2006 2.89 6.96 654 23.2 2.26 75.1
2162.9 2.83 6.75 629 23.4 2.61 75.3
2372 2.64 6.68 329 23.3 2.38 76.2
2293.6 3.08 6.69 870 23.6 2.67 75.4
2306.6 2.53 6.75 736 23.7 2.87 80.2
2267.5 2.48 7.11 768 23.8 2.17 81.8
2162.9 2.72 6.2 550 23.7 2.66 77.7
2319.7 3.65 7.03 857 24.1 2.39 85
2241.3 2.83 7.17 883 24.2 2.09 83.6
2084.5 2.54 6.8 832 24.3 2.83 73.8
2151.6 2.99 6.78 870 24.3 2.64 77.1
1861.2 3.16 6.74 588 24.6 3.08 66.3
2411.2 2.48 6.04 819 24.8 2.62 85.2
2293.6 2.51 7.23 784 24.9 2.31 86
2241.3 2.73 6.64 733 24.5 2.53 84.1
2215.2 2.67 6.82 827 24.9 2.48 80.7
2202.1 2.69 6.95 705 25.3 2.72 78.9
1784 2.38 7.06 778 25.1 3.65 59.4
2110 2.76 7.56 689 25.5 2.33 64.3
2176 2.71 6.84 755 25.9 2.54 70.9
1953.8 2.65 7.08 624 25.7 2.99 67.8
1980 2.58 7.14 587 26.2 3.26 64.6
1823.2 2.58 7.28 643 26.4 3.48 64.2
2411.2 2.79 7.27 866 26.7 2.11 82
2006.2 3.06 6.54 759 26.9 2.73 75.9
2241.6 3.18 7.3 777 27.2 2.37 78.8
2118.1 2.91 7.32 821 27.3 2.69 70.1
2319.2 2.85 7.26 905 27.8 2.38 72.7
2280.4 2.83 6.57 925 27.9 2.76 87.2
2125.5 2.68 6.61 881 27.8 2.71 75.9
2293.5 2.73 6.39 655 28.2 2.65 74.1
2370.9 2.99 7.82 800 28.6 2.58 78.9
2422.6 2.89 7.52 865 28.8 2.49 81.7
2151.6 3.02 7.13 686 28.6 2.72 72
2008.8 2.76 6.14 792 28.9 2.75 69.4
1995.8 2.98 7.48 752 29.1 3.75 64.3
1827.7 3.37 7.01 588 29.3 3.83 58.8
2409.7 3.04 7.44 602 29.4 2.28 79.6
2189.8 3.16 7.07 749 29.4 2.68 76
2034.6 3.82 6.59 746 29.5 2.77 74.1
2138.1 2.89 6.67 743 29.7 2.72 75.8
2112.3 3.12 6.26 718 29.8 2.86 76.1
2021.7 2.41 6.16 732 29.9 3.13 75.3
2306.6 3.25 7.25 878 29.8 2.36 86
2146.6 2.76 7.88 820 30.2 2.58 80.3
2026.6 2.87 7.69 673 30.3 2.79 70.3
1933.3 2.79 7.45 524 30.5 3.06 60.9
1813 2.68 7.37 498 30.7 3.18 54.4
2074.4 2.92 6.26 698 31.2 2.91 68.3
2128.8 2.96 6.49 867 31.5 2.85 76.1
2169.6 2.88 7.06 809 31.8 2.83 76.6
2196.8 2.68 7.84 814 31.9 2.68 77.4
2164.4 2.79 6.88 778 32.4 2.73 77.7
2074.4 2.81 7.81 646 32.6 2.99 71.1
2013.8 2.72 6.43 668 32.7 2.89 70.4
2069.4 2.46 6.75 653 32.5 3.02 72.3
2236 2.73 7.59 749 32.4 2.76 85.2
1805.5 3.09 7.46 567 32.4 3.42 61.9
2404.4 2.85 7.66 838 32.1 2.22 89.1
1979 2.89 7.64 660 32 2.98 64.2
1818 3.02 7.62 625 31.7 3.37 62.1
1831 2.52 7.01 605 31.8 3.04 60.7
1843 2.62 7.78 610 31.6 3.16 60.9
1762 2.74 7.58 629 31.4 3.82 59.5
2076 2.57 7.61 704 30.8 2.89 67.2
1681 2.79 7.32 663 30.9 4.12 63.8
2457 2.64 6.41 754 30.5 2.41 84.7
1827 2.6 6.5 671 30.1 3.25 64.8
2139.2 2.55 6.49 761 29.8 2.76 70.3
2049 2.88 6.67 665 29.4 2.87 69.4
2075 2.67 6.45 659 29.1 2.79 71.3
2202 2.95 7.64 662 28.8 2.68 76
1924 3.02 7.29 655 28.4 2.92 64.9
1908 2.66 7.05 659 28.6 2.96 63.9
1947 2.76 6.82 683 27.7 2.88 66.2
1896 2.81 7.33 674 27.1 3.01 63.2
1955 2.69 7.19 594 26.7 3.06 62.4
2022 2.85 6.79 670 26.8 2.79 76.8
2044 2.95 7.18 680 26.5 2.99 78.1];
% Test data: 50 samples x 7 columns, same layout as `train` above
% (cols 1-6 -> X_tst, col 7 -> Y_tst). NOTE(review): column meanings are
% not documented in this file -- confirm against the data source.
test=[1997 3.17 6.38 574 26.4 3.03 73.4
2073 2.55 6.68 701 26.1 2.94 69.2
2045 2.58 6.81 717 25.4 2.96 64.8
2057 2.59 6.84 722 25.7 3.04 60.7
2098 2.65 7.35 675 25.1 2.86 64.3
2120 2.76 7.41 697 25.3 2.76 75
2070 2.74 7.33 680 24.7 2.98 64.6
2068 2.86 6.63 698 24.8 2.67 72.3
2086 2.93 6.56 688 24.3 2.73 73.3
2016 2.96 6.87 690 24.2 2.86 71
2079 2.86 7.04 676 23.8 2.84 73.2
1990 2.88 7.08 597 23.7 3.28 61.9
2018 3.02 7.29 638 23.6 2.9 71.7
1951 2.49 6.23 582 23.2 2.99 68.8
2141 2.53 6.98 739 22.9 2.81 74.1
2099 2.57 7.11 706 22.7 2.72 73.7
2074 2.82 6.99 509 22.4 3.46 69.1
1885 2.85 7.2 558 22 3.27 53.4
1933 2.88 6.66 564 21.7 3.09 59.2
1909 2.76 6.89 551 21.3 3.15 61.1
2073 2.7 7.18 666 21.4 2.89 73.9
1998 2.73 7.51 599 21.1 3.02 66.3
2158 2.85 6.37 741 21.6 2.52 78
2104 2.79 6.54 729 20.8 2.49 76.1
2044 2.88 7.73 708 20.7 2.76 73.6
2061 2.69 7.09 716 20.6 2.57 75.3
2168 2.68 7.16 727 20.5 2.62 78.3
2135 2.66 7.12 720 20.6 2.74 74.6
2116 2.82 7.03 811 20.4 2.6 77.3
2149 2.88 7.42 826 20.7 2.55 77.8
2455 2.64 6.47 739 29.5 2.44 82.7
1792 2.6 6.62 663 28.6 3.25 64.8
2139 2.55 6.49 761 29.8 2.76 74.3
2049 2.88 6.67 665 29.4 2.87 70.4
2075 2.67 6.45 659 29.1 2.79 71.3
2232 2.45 7.45 683 28.8 2.59 79.7
1864 3.02 7.29 655 28.4 2.92 54.9
1933 2.66 7.05 659 28.6 2.96 63.9
1945 2.76 6.82 679 27.8 2.88 66.2
1786 2.84 7.31 679 27.3 2.98 63.2
2005 2.64 7.15 603 26.9 3.01 62.4
2034 2.81 6.79 670 26.4 2.77 76.8
2021 2.96 7.16 677 26.6 2.94 76.1
1967 3.12 6.42 568 26.5 3.01 70.4
2085 2.58 6.68 723 26.4 2.99 66.2
1992 2.62 6.83 708 25.7 2.96 64.8
2057 2.59 6.84 722 25.7 3.04 60.7
2078 2.72 7.35 645 25.2 2.86 68.4
2121 2.71 7.41 699 25.3 2.79 68.7
2066 2.73 7.33 681 24.5 2.97 64.6
];
% -------------------------------------------------------------------------
% PLS regression analysis: standardize the data, fit a PLS model on the
% training set, and print mean-squared error on the training and test sets
% for models using 1..lv latent variables.
% NOTE(review): `pls` and `scale` are not base MATLAB functions -- they
% appear to come from PLS_Toolbox (or a similar chemometrics toolbox);
% confirm they are on the path before running.
% -------------------------------------------------------------------------
% n_fault=125;
ini = size(train,1); %number of initial data samples
%ini = 100 ; %number of initial data samples
lv=4; % Used number of latent variable to consider
%Loading data
%data = load ('process.txt');
data = [train;test]; % pooled data set: training rows stacked above test rows
Xtot = data(:,1:6); % columns 1-6: predictor block
Ytot = data(:,7); % column 7: response
[n,m] = size(Xtot); %n: number of total samples
[n,p] = size(Ytot); % p: number of response columns (1 here); NOTE(review): p is overwritten by pls() below
trn_no=size(train,1); % number of training samples
tst_no=size(test,1); % number of test samples
X_trn=train(:,1:6);%X
Y_trn=train(:,7); %Y
X_tst=test(:,1:6);
Y_tst=test(:,7);
% Preprocessing
% zscore returns the standardized data plus the per-column mean and std,
% which are reused below so the test set is scaled with TRAINING statistics.
[trnx,mnsx,stdx] = zscore(X_trn);
[trny,mnsy,stdy] = zscore(Y_trn);
% scale(X,mn,sd) -- presumably computes (X - mn)./sd elementwise per column
% (toolbox function); verify against the toolbox implementation.
tstx=scale(X_tst,mnsx,stdx);
tsty=scale(Y_tst,mnsy,stdy);
%----------------------------------------------------------
%----------------------------------------------------------
%batch PLS for initial data
% [b,ssq,p,q,w,t,u,bin] = pls(trnx,trny,lv,1);
% NOTE(review): this call rebinds p, q, w, t, u to the PLS loadings /
% weights / scores, shadowing the p assigned above. b is assumed to hold
% the regression coefficients of all 1..lv latent-variable models stacked
% row-wise in blocks of ky rows each (toolbox convention -- confirm).
[b,ssq,p,q,w,t,u,bin,Yhat] = pls(trnx,trny,lv,1);
%function [m,ssq,p,q,w,t,u,b,Yhat] = pls(x,y,lv,out)
%[b,ssq,p,q,w,t,u,bin] = pls(trnx,trny,lv,1);
%an optional variable (out) to suppress intermediate output [out=0 suppresses output]
ky=size(trny,2); % number of response columns = row-block size within b
% Mean Squared Error Prediciton
% For each model with i latent variables: select its coefficient block from
% b, predict train and test responses, and append [train MSE, test MSE].
msep_pls=[];
for i=1:lv
predtrn=trnx*b((i-1)*ky+1:i*ky,:)';
predtst=tstx*b((i-1)*ky+1:i*ky,:)';
msep_pls=[msep_pls; sum((trny-predtrn).^2)/trn_no ...
sum((tsty-predtst).^2)/tst_no];
end
%pls_predy=tstx*b((lv-1)*ky+1:lv*ky,:)';
%
% Mean Squared Error for TRN, CV & PRED
%
% Print a small table: one row per latent-variable count, with training
% and test MSE (summed over response columns when ky > 1).
msep=[];
disp(' ')
disp(' Mean Squared Error for LPLS ')
disp(' ')
disp(' LV # -TRN- -PRED- ')
disp(' ---- ------ ------ ------')
msep = [(1:lv)' sum(msep_pls(:,[1:ky]),2) sum(msep_pls(:,[ky+1:2*ky]),2)];% msep_ifpls(:,1) msep_nnpls(:,1)];
format = ' %3.0f %6.2f %6.2f ';%6.2f %6.2f';
for i = 1:lv
tab = sprintf(format,msep(i,:)); disp(tab)
end
disp(' ')
%
% Variables below (predx, yreal, nn) are set up here but presumably
% consumed by code later in the file -- do not remove.
predx=tstx; % choose data to predict-[calx,mnsx,stdx] = zscore(X_total);
yreal=Y_tst;
nn=size(yreal,1);
p % NOTE(review): echoes the PLS X-loadings from pls() above (shadows the earlier p); presumably intentional debug output
% 评论0  ("0 comments" -- stray web-page artifact from the copy source; commented out so the script parses)