clc
clear
close all
% Load data. When new samples are added, update the range 'B1:BUB121' here.
X = xlsread('LSNIR.xlsx','','B1:BUB121');
Y = xlsread('LS含量','','B2:D122');
%% Data preprocessing
% n = 1: raw data; n = 2: z-score standardization; n = 3: first derivative
n = input('请输入n=');
switch n
    case 1
        % Use the raw spectra and targets unchanged.
    case 2
        % Standardize both predictors and responses column-wise.
        X = zscore(X);
        Y = zscore(Y);
    case 3
        % First derivative along the wavelength axis (columns of X).
        % Y is left unchanged.
        X = diff(X,1,2);
    otherwise
        error('n must be 1, 2 or 3');
end
% Extract the three response columns once, after preprocessing.
y1 = Y(:,1);
y2 = Y(:,2);
y3 = Y(:,3);
%% 水 (first response column)
n = length(y1);
% Hold out 30% of the samples as a test set.
c = cvpartition(n,'HoldOut',0.3);
idxTrain = training(c,1);
idxTest = ~idxTrain;
% Split into training and test sets. NOTE: both X and y must use the SAME
% partition indices so the two sets are disjoint and rows stay aligned
% (the original code indexed the test set through an unrelated randperm,
% which could leak training samples into the test set and misalign X/y).
XTrain = X(idxTrain,:);
yTrain = y1(idxTrain);
XTest = X(idxTest,:);
yTest = y1(idxTest);
% Fit an elastic-net regression (Alpha = 0.75) with 10-fold cross-validation.
% Use the largest Lambda whose MSE is within one standard error of the
% minimum MSE.
[B,FitInfo] = lasso(XTrain,yTrain,'Alpha',0.75,'CV',10);
axTrace = lassoPlot(B,FitInfo);              % coefficient trace plot
axCV = lassoPlot(B,FitInfo,'PlotType','CV'); % cross-validation MSE plot
idxLambda1SE = FitInfo.Index1SE;
coef = B(:,idxLambda1SE);
coef0 = FitInfo.Intercept(idxLambda1SE);
% Predict on both sets and compare test predictions to actual values.
T_sim2 = XTest*coef + coef0;
T_sim1 = XTrain*coef + coef0;
figure
scatter(yTest,T_sim2)
hold on
plot(yTest,yTest)   % y = x reference line
xlabel('Actual Exam Grades')
ylabel('Predicted Exam Grades')
hold off
%% Performance metrics
% Coefficient of determination (R^2)
R1 = 1 - norm(yTrain - T_sim1)^2 / norm(yTrain - mean(yTrain))^2;
R2 = 1 - norm(yTest - T_sim2)^2 / norm(yTest - mean(yTest))^2;
disp(['训练集数据的R2为: ',num2str(R1)])
disp(['测试集数据的R2为: ',num2str(R2)])
% Mean absolute error — use the actual set sizes, not hard-coded 85/36.
mae1 = mean(abs(T_sim1 - yTrain));
mae2 = mean(abs(T_sim2 - yTest));
disp(['训练集数据的MAE为: ',num2str(mae1)])
disp(['测试集数据的MAE为: ',num2str(mae2)])
% Mean bias error
mbe1 = mean(T_sim1 - yTrain);
mbe2 = mean(T_sim2 - yTest);
disp(['训练集数据的MBE为: ',num2str(mbe1)])
disp(['测试集数据的MBE为: ',num2str(mbe2)])
% Root-mean-square error — residuals must be SQUARED before averaging
% (the original omitted .^2, which can even produce complex results).
rmse1 = sqrt(mean((T_sim1 - yTrain).^2));
rmse2 = sqrt(mean((T_sim2 - yTest).^2));
disp(['训练集数据的RMSE为: ',num2str(rmse1)])
disp(['测试集数据的RMSE为: ',num2str(rmse2)])
acc2 = 1 - rmse2;
disp(['模型的精度为: ',num2str(acc2)])
%% 灰 (second response column)
n = length(y2);
% Hold out 30% of the samples as a test set.
c = cvpartition(n,'HoldOut',0.3);
idxTrain = training(c,1);
idxTest = ~idxTrain;
% Split into training and test sets. NOTE: both X and y must use the SAME
% partition indices so the two sets are disjoint and rows stay aligned
% (the original code indexed the test set through an unrelated randperm,
% which could leak training samples into the test set and misalign X/y).
XTrain = X(idxTrain,:);
yTrain = y2(idxTrain);
XTest = X(idxTest,:);
yTest = y2(idxTest);
% Fit an elastic-net regression (Alpha = 0.75) with 10-fold cross-validation.
% Use the largest Lambda whose MSE is within one standard error of the
% minimum MSE.
[B,FitInfo] = lasso(XTrain,yTrain,'Alpha',0.75,'CV',10);
axTrace = lassoPlot(B,FitInfo);              % coefficient trace plot
axCV = lassoPlot(B,FitInfo,'PlotType','CV'); % cross-validation MSE plot
idxLambda1SE = FitInfo.Index1SE;
coef = B(:,idxLambda1SE);
coef0 = FitInfo.Intercept(idxLambda1SE);
% Predict on both sets and compare test predictions to actual values.
T_sim2 = XTest*coef + coef0;
T_sim1 = XTrain*coef + coef0;
figure
scatter(yTest,T_sim2)
hold on
plot(yTest,yTest)   % y = x reference line
xlabel('Actual Exam Grades')
ylabel('Predicted Exam Grades')
hold off
%% Performance metrics
% Coefficient of determination (R^2)
R1 = 1 - norm(yTrain - T_sim1)^2 / norm(yTrain - mean(yTrain))^2;
R2 = 1 - norm(yTest - T_sim2)^2 / norm(yTest - mean(yTest))^2;
disp(['训练集数据的R2为: ',num2str(R1)])
disp(['测试集数据的R2为: ',num2str(R2)])
% Mean absolute error — use the actual set sizes, not hard-coded 85/36.
mae1 = mean(abs(T_sim1 - yTrain));
mae2 = mean(abs(T_sim2 - yTest));
disp(['训练集数据的MAE为: ',num2str(mae1)])
disp(['测试集数据的MAE为: ',num2str(mae2)])
% Mean bias error
mbe1 = mean(T_sim1 - yTrain);
mbe2 = mean(T_sim2 - yTest);
disp(['训练集数据的MBE为: ',num2str(mbe1)])
disp(['测试集数据的MBE为: ',num2str(mbe2)])
% Root-mean-square error — residuals must be SQUARED before averaging
% (the original omitted .^2, which can even produce complex results).
rmse1 = sqrt(mean((T_sim1 - yTrain).^2));
rmse2 = sqrt(mean((T_sim2 - yTest).^2));
disp(['训练集数据的RMSE为: ',num2str(rmse1)])
disp(['测试集数据的RMSE为: ',num2str(rmse2)])
acc2 = 1 - rmse2;
disp(['模型的精度为: ',num2str(acc2)])
%% 酸 (third response column)
n = length(y3);
% Hold out 30% of the samples as a test set.
c = cvpartition(n,'HoldOut',0.3);
idxTrain = training(c,1);
idxTest = ~idxTrain;
% Split into training and test sets. NOTE: both X and y must use the SAME
% partition indices so the two sets are disjoint and rows stay aligned
% (the original code indexed the test set through an unrelated randperm,
% which could leak training samples into the test set and misalign X/y).
XTrain = X(idxTrain,:);
yTrain = y3(idxTrain);
XTest = X(idxTest,:);
yTest = y3(idxTest);
% Fit an elastic-net regression (Alpha = 0.75) with 10-fold cross-validation.
% Use the largest Lambda whose MSE is within one standard error of the
% minimum MSE.
[B,FitInfo] = lasso(XTrain,yTrain,'Alpha',0.75,'CV',10);
axTrace = lassoPlot(B,FitInfo);              % coefficient trace plot
axCV = lassoPlot(B,FitInfo,'PlotType','CV'); % cross-validation MSE plot
idxLambda1SE = FitInfo.Index1SE;
coef = B(:,idxLambda1SE);
coef0 = FitInfo.Intercept(idxLambda1SE);
% Predict on both sets and compare test predictions to actual values.
T_sim2 = XTest*coef + coef0;
T_sim1 = XTrain*coef + coef0;
figure
% Original plotted an undefined variable 'yhat' here (runtime error);
% the test-set predictions are T_sim2.
scatter(yTest,T_sim2)
hold on
plot(yTest,yTest)   % y = x reference line
xlabel('Actual Exam Grades')
ylabel('Predicted Exam Grades')
hold off
%% Performance metrics
% Coefficient of determination (R^2)
R1 = 1 - norm(yTrain - T_sim1)^2 / norm(yTrain - mean(yTrain))^2;
R2 = 1 - norm(yTest - T_sim2)^2 / norm(yTest - mean(yTest))^2;
disp(['训练集数据的R2为: ',num2str(R1)])
disp(['测试集数据的R2为: ',num2str(R2)])
% Mean absolute error — use the actual set sizes, not hard-coded 85/36.
mae1 = mean(abs(T_sim1 - yTrain));
mae2 = mean(abs(T_sim2 - yTest));
disp(['训练集数据的MAE为: ',num2str(mae1)])
disp(['测试集数据的MAE为: ',num2str(mae2)])
% Mean bias error
mbe1 = mean(T_sim1 - yTrain);
mbe2 = mean(T_sim2 - yTest);
disp(['训练集数据的MBE为: ',num2str(mbe1)])
disp(['测试集数据的MBE为: ',num2str(mbe2)])
% Root-mean-square error — residuals must be SQUARED before averaging
% (the original omitted .^2, which can even produce complex results).
rmse1 = sqrt(mean((T_sim1 - yTrain).^2));
rmse2 = sqrt(mean((T_sim2 - yTest).^2));
disp(['训练集数据的RMSE为: ',num2str(rmse1)])
disp(['测试集数据的RMSE为: ',num2str(rmse2)])
acc2 = 1 - rmse2;
disp(['模型的精度为: ',num2str(acc2)])
% --- NOTE(review): the lines below are web-page residue accidentally pasted
% --- into this file (download-site boilerplate, not MATLAB code). They have
% --- been commented out so the script can parse and run; safe to delete.
% 没有合适的资源?快使用搜索试试~ 我知道了~
% 资源推荐
% 资源详情
% 资源评论
% 收起资源包目录
% 29.elastic net回归.zip (3个子文件)
% 29.elastic net回归
% main.m 6KB
% main.asv 6KB
% LS含量.xlsx 14KB
% 共 3 条
% - 1
% 资源评论
% AIDog
% - 粉丝: 764
% - 资源: 33
% 上传资源 快速赚钱
% - 我的内容管理 展开
% - 我的资源 快来上传第一个资源
% - 我的收益 登录查看自己的收益
% - 我的积分 登录查看自己的积分
% - 我的C币 登录后查看C币余额
% - 我的收藏
% - 我的下载
% - 下载帮助
% 安全验证
% 文档复制为VIP权益,开通VIP直接复制
% 信息提交成功