%% Reset the environment
warning('off','all') % suppress warning messages
close all            % close any open figure windows
clear                % remove all workspace variables
clc                  % clear the command window
%% Load the data set
addpath(genpath(pwd));               % make all project subfolders visible
raw = readmatrix('风电场预测.xlsx');  % read the full spreadsheet
series = raw(5665:8640, 12);         % keep the March slice of column 12
% Sliding window of length 24: the previous 24 values predict the 25th
% (data_process is a project-local helper).
[h1, l1] = data_process(series, 24);
res = [h1, l1];                      % one sample per row: [window, target]
num_samples = size(res, 1);          % total number of samples
% Train/test split (chronological, 70% train / 30% test)
outdim = 1;                             % the last column is the target
num_size = 0.7;                         % fraction of samples used for training
nTrain = round(num_size * num_samples); % number of training samples
nIn = size(res, 2) - outdim;            % input feature dimension

P_train = res(1:nTrain, 1:nIn)';        % training inputs, one sample per column
T_train = res(1:nTrain, nIn+1:end)';    % training targets
P_test  = res(nTrain+1:end, 1:nIn)';    % test inputs
T_test  = res(nTrain+1:end, nIn+1:end)';% test targets
M = size(P_train, 2);                   % training sample count
N = size(P_test, 2);                    % test sample count

% Min-max normalization to [0,1]; the test split reuses the training statistics
[p_train, ps_input]  = mapminmax(P_train, 0, 1);
p_test  = mapminmax('apply', P_test, ps_input);
[t_train, ps_output] = mapminmax(T_train, 0, 1);
t_test  = mapminmax('apply', T_test, ps_output);
% Convert to the cell-array sequence format expected by trainNetwork:
% one cell per sample, each holding one column (the 24-step input window,
% or the single-step target).  num2cell replaces the original loops that
% grew the cell arrays element-by-element without preallocation; the
% resulting M-by-1 / N-by-1 cells are identical.
vp_train = num2cell(p_train, 1)'; % M-by-1 cell of input columns
vt_train = num2cell(t_train, 1)'; % M-by-1 cell of target columns
vp_test  = num2cell(p_test, 1)';  % N-by-1 cell of input columns
vt_test  = num2cell(t_test, 1)';  % N-by-1 cell of target columns
numFeatures = size(p_train,1);    % input dimension (window length, 24)
%% Network construction: CNN-BiGRU-ATTENTION
% Three parallel branches share the sequence input:
%   (1) a 2-D CNN stack ending in fc_1 (25 units),
%   (2) a forward GRU (gru1, 35 units),
%   (3) a time-reversed GRU (flip3 -> gru2, 35 units).
% Their outputs are concatenated along dim 1 (35+35+25 = 95 channels) and
% passed through a single-head self-attention layer to a regression output.
lgraph = layerGraph();
% Add the layer branches.
% Each branch is a linear chain of layers added to the layer graph.
tempLayers = sequenceInputLayer([numFeatures,1,1],"Name","sequence");
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
convolution2dLayer([3,1],16,"Name","conv","Padding","same")
batchNormalizationLayer("Name","batchnorm")
reluLayer("Name","relu")
maxPooling2dLayer([2 2],"Name","maxpool","Padding","same")
convolution2dLayer([3,1],16,"Name","conv2","Padding","same")
batchNormalizationLayer("Name","batchnorm2")
reluLayer("Name","relu2")
maxPooling2dLayer([2 2],"Name","maxpool2","Padding","same")
flattenLayer("Name","flatten_1")
fullyConnectedLayer(25,"Name","fc_1")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = flattenLayer("Name","flatten");
lgraph = addLayers(lgraph,tempLayers);
tempLayers = gruLayer(35,"Name","gru1");
lgraph = addLayers(lgraph,tempLayers);
% Backward branch: FlipLayer (project-local) reverses the sequence so gru2
% processes it back-to-front.
% NOTE(review): gru2's output is not flipped back before "concat", so its
% time steps arrive reversed relative to gru1's — confirm this is intended.
tempLayers = [
FlipLayer("flip3")
gruLayer(35,"Name","gru2")];
lgraph = addLayers(lgraph,tempLayers);
tempLayers = [
concatenationLayer(1,3,"Name","concat")
selfAttentionLayer(1,50,"Name","selfattention") % attention: 1 head, 50 key/query channels
fullyConnectedLayer(outdim,"Name","fc")
regressionLayer("Name","regressionoutput")];
lgraph = addLayers(lgraph,tempLayers);
% Clean up the helper variable
clear tempLayers;
% Connect the branches.
% Wire all branches together to form the final network graph.
lgraph = connectLayers(lgraph,"sequence","conv");
lgraph = connectLayers(lgraph,"sequence","flatten");
lgraph = connectLayers(lgraph,"flatten","gru1");
lgraph = connectLayers(lgraph,"flatten","flip3");
lgraph = connectLayers(lgraph,"gru1","concat/in1");
lgraph = connectLayers(lgraph,"gru2","concat/in2");
lgraph = connectLayers(lgraph,"fc_1","concat/in3");
%% Training options
options = trainingOptions('adam', ... % Adam optimizer
'MaxEpochs', 30, ... % maximum number of training epochs
'GradientThreshold', 1, ... % gradient clipping threshold
'InitialLearnRate', 0.01, ... % initial learning rate
'LearnRateSchedule', 'piecewise', ... % piecewise learning-rate schedule
'LearnRateDropPeriod', 22, ... % drop the learning rate after epoch 22 (original comment wrongly said 60)
'LearnRateDropFactor',0.1, ... % multiply the learning rate by 0.1 at each drop
'ExecutionEnvironment', 'cpu',... % train on the CPU
'Verbose', 1, ... % print training progress (1 = on; original comment wrongly said "off")
'Plots', 'none'); % no training-progress plot (original comment wrongly said "plot the curve")
% Train the network and time the run
tic
net = trainNetwork(vp_train, vt_train, lgraph, options);
toc
analyzeNetwork(net); % open the interactive network-structure viewer
% Predict on both splits (cell-sequence in, cell-sequence out)
t_sim1 = predict(net, vp_train);
t_sim2 = predict(net, vp_test);
% Undo the min-max normalization, then flatten the cell outputs into row vectors
T_sim1 = cell2mat(mapminmax('reverse', t_sim1, ps_output))';
T_sim2 = cell2mat(mapminmax('reverse', t_sim2, ps_output))';
T_train1 = T_train; % ground truth, training split
T_test2  = T_test;  % ground truth, test split
% Keep named copies of the baseline predictions and persist them to disk
CNN_BiGRU_ATTENTION_TSIM1 = T_sim1;
CNN_BiGRU_ATTENTION_TSIM2 = T_sim2;
save('CNN_BiGRU_ATTENTION', 'CNN_BiGRU_ATTENTION_TSIM1', 'CNN_BiGRU_ATTENTION_TSIM2')
%% Baseline model: error metrics and fit plots
disp('…………训练集误差指标…………')
[mae1,rmse1,mape1,error1]=calc_error(T_train1,T_sim1);
% NOTE(review): a later call unpacks calc_error as (MAE,MAPE,MSE,RMSE,R2);
% if that is the true output order, rmse1/mape1 here are mislabeled — verify
% against calc_error.m.
fprintf('\n')
% Training-set fit: ground truth and prediction on one axis
figure('Position',[200,300,600,200])
plot([T_train1; T_sim1]')
legend('真实值','预测值')
title('CNN-BiGRU-ATTENTION训练集预测效果对比')
xlabel('样本点')
ylabel('发电功率')
disp('…………测试集误差指标…………')
[mae2,rmse2,mape2,error2]=calc_error(T_test2,T_sim2);
fprintf('\n')
% Test-set fit
figure('Position',[200,300,600,200])
plot([T_test2; T_sim2]')
legend('真实值','预测值')
title('CNN-BiGRU-ATTENTION预测集预测效果对比')
xlabel('样本点')
ylabel('发电功率')
% Test-set residuals (prediction minus ground truth)
figure('Position',[200,300,600,200])
plot(T_sim2-T_test2)
title('CNN-BiGRU-ATTENTION误差曲线图')
xlabel('样本点')
ylabel('发电功率')
%% Optimize the CNN-BiGRU-Attention hyper-parameters
disp(' ')
disp('优化CNN_BiGRU_attention神经网络:') % fixed: message said BiLSTM, but this model is BiGRU
%% Optimizer initialization
popsize=10; % initial population size
maxgen=8;   % maximum number of generations
% Objective: train a candidate network with hyper-parameters x and return
% its test-set error (project-local objectiveFunction).
fobj = @(x)objectiveFunction(x,numFeatures,outdim,vp_train,vt_train,vp_test,T_test,ps_output);
% Search-space bounds: [learning rate, BiGRU units, attention key channels, kernel size]
lb = [0.001 10 2 2];  % lower bounds
ub = [0.01 50 50 10]; % upper bounds
dim = length(lb);     % number of optimized hyper-parameters
% Alternatives: 'DBO','GWO','OOA','PSO','SABO','SCSO','SSA','BWO','RIME','WOA','HHO','NGO';
[Best_score,Best_pos,curve]=NGO(popsize,maxgen,lb,ub,dim,fobj); % swap the function name to change optimizer
% Re-seed the RNG for reproducibility.
% Fixed: setdemorandstream requires a nonnegative integer seed for the
% mt19937ar stream; pi (3.1416) is not integer-valued and would error.
setdemorandstream(round(pi));
%% Plot the convergence curve
figure
plot(curve,'r-','linewidth',2)
xlabel('进化代数')
ylabel('均方误差')
legend('最佳适应度')
title('进化曲线')
%% Re-evaluate with the best hyper-parameters Best_pos
[~,optimize_T_sim] = objectiveFunction(Best_pos,numFeatures,outdim,vp_train,vt_train,vp_test,T_test,ps_output);
setdemorandstream(round(pi)); % same integer-seed fix as above
%% Overlay the test-set predictions of both models against the ground truth
str={'真实值','CNN-BiGRU-Attention','优化后CNN-BiGRU-Attention'};
figure('Units','pixels','Position',[300 300 860 370]);
hold on
plot(T_test,'-','Color',[0.8500 0.3250 0.0980])          % ground truth
plot(T_sim2,'-.','Color',[0.4940 0.1840 0.5560])         % baseline model
plot(optimize_T_sim,'-','Color',[0.4660 0.6740 0.1880])  % optimized model
legend(str)
set(gca,'FontSize',12,'LineWidth',1.2)
box off
legend('Box','off')
%% Tabulate test-set error metrics for both models
test_y = T_test; % ground truth
% NOTE(review): calc_error is unpacked here as 5 outputs but as 4 earlier in
% this script — confirm its actual output order in calc_error.m.
Test_all = [];
for pred = {T_sim2, optimize_T_sim} % baseline first, optimized second
    y_test_predict = pred{1};
    [test_MAE,test_MAPE,test_MSE,test_RMSE,test_R2]=calc_error(y_test_predict,test_y);
    Test_all = [Test_all; test_MAE test_MAPE test_MSE test_RMSE test_R2];
end
str={'真实值','CNN-BiGRU-Attention','优化后CNN-BiGRU-Attention'};
str1=str(2:end);                      % table row labels (model names)
str2={'MAE','MAPE','MSE','RMSE','R2'};% table column labels (metric names)
data_out=array2table(Test_all,'VariableNames',str2,'RowNames',str1);
disp(data_out)
%% 柱状图 MAE MAPE RMSE 柱状图适合量纲差别不大的
color= [0.66669 0.1206 0.108
0.1339 0.7882 0.8588
0.1525 0