clc;
% Generating random correlated data
mu = 50;
sigma = 5;
M = mu + sigma * randn(300, 2);
R = [1, 0.75; 0.75, 1];
L = chol(R);
M = M*L;
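% Post-multiplying the (approximately uncorrelated) Gaussian samples by the
% Cholesky factor of R induces a correlation of about 0.75 between the two
% columns; the second column's mean and spread shift as a side effect, which
% is harmless for this synthetic example.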
x = M(:,1); % Example inputs; replace with your own input data
y = M(:,2); % Example targets; replace with your own target data
% Min-max normalization (inputs and targets each scaled by their own range)
mn = min(x); mm = max(x) - mn;    % input scaling parameters
mny = min(y); mmy = max(y) - mny; % target scaling parameters
X = (x - mn)/mm;
Y = (y - mny)/mmy;
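% The same scaling parameters are reused later to denormalize predictions:
% original_value = normalized_value*range + minimum.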
% 90%:10% split of the data into training and test sets
sz = ceil(0.9*size(X,1));
inputs = (X(1:sz))';
targets = (Y(1:sz))';
XTest = (X(sz+1:end))';
YTest = Y(sz+1:end)';
% number of hidden neurons
n = 4;
tic;
% create a neural network
net = feedforwardnet(n);
% configure the neural network for this dataset
net = configure(net, inputs, targets);
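% Assumed step (not in the original script): train the baseline FNN with the
% toolbox default (Levenberg-Marquardt) so that the "FNN Predictions" below
% come from a fitted model rather than from randomly initialized weights.
net = train(net, inputs, targets);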
% Denormalization and prediction by the baseline FNN (kept as a row vector)
FNN_Pred = net(XTest)*mmy + mny;
%% BAT algorithm
%% Problem Definition
N = 20; % Number of Bats
Max_iter = 30; % Maximum number of iterations
fobj = @(x) NMSE(x, net, inputs, targets);
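% Each candidate solution is a full weight/bias vector of the FNN; its fitness
% is the normalized mean squared error on the training data (see the NMSE
% function at the bottom of this file).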
% Search-space bounds and dimensionality (one entry per network weight/bias)
lb = -1; ub = 1;
dim = length(getwb(net)); % total number of weights and biases in the FNN
[bestfit,best_wb,~,BAT_Cg_curve]=newBAT(N,Max_iter,lb,ub,dim,fobj);
% Load the optimized weights and biases into the network
net = setwb(net, best_wb');
% Denormalization and prediction by the BAT-optimized FNN
BAT_FNN_Pred = net(XTest)*mmy + mny;
YTest = YTest*mmy + mny;
BAT_FNN_Execution_Time_Seconds = toc
% Plotting prediction results
figure;
plot(YTest,'LineWidth',2, 'Marker','diamond', 'MarkerSize',8);
hold on;
plot(FNN_Pred, 'LineWidth',2, 'Marker','x', 'MarkerSize',8);
plot(BAT_FNN_Pred, 'LineWidth',2, 'Marker','pentagram', 'MarkerSize',8);
title('BAT Optimization based Feed-Forward Neural Network');
xlabel('Time Interval');
ylabel('Values');
legend('Actual Values', 'FNN Predictions', 'BAT-FNN Predictions');
hold off;
% Performance evaluation of FNN and BAT-FNN
fprintf('Performance evaluation of FNN and BAT-FNN using the Normalized Root Mean Square Error (NRMSE)\n');
NRMSE_FNN = sqrt(mean((FNN_Pred - YTest).^2)) / (max(YTest)-min(YTest))
NRMSE_BAT_FNN = sqrt(mean((BAT_FNN_Pred - YTest).^2)) / (max(YTest)-min(YTest))
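% Note: BAT minimizes the NMSE on the normalized training data, whereas the
% NRMSE above is reported on the denormalized test set.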
figure;
semilogy(BAT_Cg_curve, 'LineWidth',2, 'Color','r')
title('Objective Space Optimization Progress')
xlabel('Iteration');
ylabel('Best Fitness');
axis tight
grid on
box on
legend('BAT Optimization')
disp(['The best solution obtained by BAT is : ', num2str(best_wb)]);
disp(['The best value of the objective function found by BAT is : ', num2str(bestfit)]);
function [bestfit,BestPositions,fmin,Convergence_curve]=newBAT(N,Max_iter,lb,ub,dim,fobj)
Fmax=2; %maximum frequency
Fmin=0; %minimum frequency
A=rand(N,1); %loudness for each BAT
r=rand(N,1); %pulse emission rate for each BAT
alpha=0.5; %constant for loudness update
gamma=0.5; %constant for emission rate update
ro=0.001; %initial pulse emission rate
% Initializing arrays
F=zeros(N,1); % Frequency
v=zeros(N,dim); % Velocities
% Initialize the population
x=initializationb(N,dim,ub,lb);
Convergence_curve=zeros(1,Max_iter);
fitness=zeros(N,1); % preallocate the fitness vector
% Evaluate the fitness of the initial population
for ii=1:N
fitness(ii)=fobj(x(ii,:));
end
[fmin,index]=min(fitness); %initial best fitness value
bestsol=x(index,:); %corresponding initial best solution
%%
iter=1; % start the loop counter
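% Each iteration applies the standard bat-algorithm updates (Yang, 2010):
%   frequency: F_i = Fmin + (Fmax - Fmin)*beta, with beta ~ U(0,1)
%   velocity:  v_i <- v_i + (x_i - x_best)*F_i
%   position:  x_i <- x_i + v_i
% followed by an optional local random walk around the current best and the
% loudness / pulse-rate updates A_i <- alpha*A_i, r_i <- ro*(1 - exp(-gamma*t)).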
while iter<=Max_iter %start the loop for iterations
for ii=1:N
F(ii)=Fmin+(Fmax-Fmin)*rand; %randomly choose a frequency for bat ii
v(ii,:)=v(ii,:)+(x(ii,:)-bestsol)*F(ii); %update the velocity
x(ii,:)=x(ii,:)+v(ii,:); %update the bat position
% Apply simple bounds/limits
Flag4up=x(ii,:)>ub;
Flag4low=x(ii,:)<lb;
x(ii,:)=(x(ii,:).*(~(Flag4up+Flag4low)))+ub.*Flag4up+lb.*Flag4low;
% Local search: with probability (1 - r_i), take a random walk around the best
if rand>r(ii)
epsilon=2*rand-1; % uniform random number in [-1, 1]
x(ii,:)=bestsol+epsilon*mean(A); % step scaled by the average loudness
end
fitnessnew=fobj(x(ii,:)); % calculate the objective function
% Accept the new solution if it improves and the bat is not too loud
if (fitnessnew<=fitness(ii)) && (rand<A(ii))
fitness(ii)=fitnessnew;
A(ii)=alpha*A(ii); %decrease the loudness
r(ii)=ro*(1-exp(-gamma*iter)); %increase the pulse emission rate
end
% Update the current global best
if fitnessnew<=fmin
bestsol=x(ii,:);
fmin=fitnessnew;
end
end
Convergence_curve(iter)= fmin;
iter=iter+1; % update the while loop counter
end
bestfit=fmin;
BestPositions=bestsol;
end
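% Quick standalone check of newBAT (independent of the FNN above), e.g. on a
% simple sphere function; the best fitness should approach zero as the
% iteration budget grows:
%   [fbest, xbest] = newBAT(20, 50, -5, 5, 10, @(z) sum(z.^2));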
% This function initializes the first population of search agents
function x=initializationb(N,dim,ub,lb)
Boundary_no= size(ub,2); % number of boundary entries
% If all variables share the same bounds, the user enters a single
% number for both ub and lb
if Boundary_no == 1
x = rand(N,dim).*(ub-lb)+lb;
end
% If each variable has a different lb and ub
if Boundary_no>1
x = zeros(N,dim); % preallocate the population matrix
for i=1:dim
ub_i=ub(i);
lb_i=lb(i);
x(:,i)=rand(N,1).*(ub_i-lb_i)+lb_i;
end
end
end
% Objective function: normalized mean squared error (NMSE) of the FNN for a
% given vector of network weights and biases
function [f] = NMSE(wb, net, input, target)
% wb is the row vector of weights and biases produced by the optimizer (BAT).
% It must be transposed when transferring the weights and biases to the network net.
net = setwb(net, wb');
% The network output matrix is net(input); the corresponding error matrix is
err = target - net(input);
% The mean squared error, normalized by the mean target variance, is
f = mean(err.^2)/mean(var(target',1));
% This measure is independent of the scale of the targets and is related to
% the R-squared statistic via Rsquare = 1 - NMSE.
end
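% Example: evaluate the current network's training NMSE directly,
%   f = NMSE(getwb(net)', net, inputs, targets);
% (getwb returns a column vector, so it is transposed here to match the
% row-vector convention used by the optimizer.)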