% RBF.m
% Radial Basis Function Neural Network (RBFNN) designed to predict house prices in the Boston area
%
% Francis Poole
% 16/04/15
%
function RBF()
%RBF  Entry point: tune and train a Gaussian RBF network to predict Boston
%house prices. Runs a sequence of hyper-parameter sweeps (number of folds k,
%number of centres M, variance strategy, regularisation lambda,
%pre-processing option, committee size L). After each sweep the best value
%found is hard-coded back in before the next stage, and the final model
%predicts on the held-out prediction set.
%Initalise
clear all %NOTE(review): inside a function this clears only the local workspace
close all
clc
dbstop if error %Drop into the debugger on any runtime error
%Initial Free Parameters
M = 10; %Number of centres
Sigma = 'cov'; %Variance of centres - 'cov' = covariance of k-means clusters, 'ave' = average distance between centres
lambda = 0; %Regularisation parameter
L = 3; %Number of committees
%Setup
%Get data
t = getTrainingData(); %Training data
p = getPredictionData(); %Prediction data
%Split training data to input and target
xStart = t(:,1:end-1); %Input data
dStart = t(:,end); %Target data
%K-fold setup
I = randperm(size(xStart,1)); %Random indexes - fixed once so every sweep uses the same folds
Error = [];
xStart = preProcess(xStart, 1); %Option 1 pre-processing (linear rescaling, per the final-stage comment)
%Optimisation
%K-fold Cross Validation-----------------------------------------------
%Sweep k = 2..10; average 10 repeated runs per k to smooth k-means randomness
n = 1;
for k = 2:10
for run = 1:10
Error(run,n) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I); %Initial run
end
n = n+1;
end
Error = mean(Error,1) %No semicolon: display the per-k mean errors
figure(1)
plot(Error)
Error = []; %Reset Error
k = 7; %Assign best number of folds - (7)
%Number of centres-----------------------------------------------------
%Increase M until the error rises on two consecutive steps (one "grace" step
%via the `lower` flag). Note `lower` is first set on the initial iteration,
%which always improves on E = inf.
M = 1;
E = inf;
working = 1;
while working
Error %Display progress so far (no semicolon)
Error(M) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I);
if E > Error(M)
E = Error(M); %New best error - remember it and allow one worse step
lower = 1;
elseif lower
lower = 0; %First worse step - keep going once more
else
working = 0; %Second consecutive worse step - stop the sweep
end
M = M+1;
end
Error
figure(1)
plot(Error)
Error = [];
M = 7; %Assign best number of centres - (7)
%Variance of Centres---------------------------------------------------
%Compare the two variance strategies, averaged over 2 runs
%NOTE(review): 'avg' here vs 'ave' documented above - confirm which token
%getCentres actually expects
for n = 1:2
Error(n,1) = RBFGaussian(xStart, dStart, k, M, 'avg', lambda, L, I);
Error(n,2) = RBFGaussian(xStart, dStart, k, M, 'cov', lambda, L, I);
end
Error = mean(Error,1)
bar(Error)
Error = [];
Sigma = 'cov'; %Assign best variance - (Covariance)
%Regularisation--------------------------------------------------------
%Sweep lambda from 0 to 0.2 in steps of 0.01
n = 1;
for lambda = 0:0.01:0.2
Error(n) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I);
n = n+1;
end
plot(Error)
Error
Error = [];
lambda = 0.06; %Assign best regularisation parameter - (0.06)
%Pre-processing--------------------------------------------------------
%Compare raw input vs pre-processing options 1-3
%Normalisation
xStart = t(:,1:end-1); %Get unprocessed input data
Error(1) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I); %Baseline: raw inputs
xStart = preProcess(xStart, 1); %NEED TO ADD PREDICTION DATA!
Error(2) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I);
xStart = t(:,1:end-1); %Get unprocessed input data
xStart = preProcess(xStart, 2); %NEED TO ADD PREDICTION DATA!
Error(3) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I);
%Dimensionality Reduction
xStart = t(:,1:end-1); %Get unprocessed input data
xStart = preProcess(xStart, 3);
Error(4) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I);
Error
bar(Error)
xStart = t(:,1:end-1); %Get unprocessed input data
processedData = preProcess(xStart, 1);
%Committee Machines----------------------------------------------------------------------
%Sweep committee size L = 1..8
n = 1;
for L = 1:8
Error(n) = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I);
plot(Error) %Re-plot after every L so progress is visible
n = n+1;
end
Error
Error = [];
L = 4; %Assign best number of committees - (4)
%Final prediction: pre-process training and prediction inputs together so
%both share the same scaling, then split them back apart
xStart = t(:,1:end-1); %Get unprocessed input data
processedData = preProcess([xStart;p], 1); %Do best preprocessing - (Linear Rescaling)
xStart = processedData(1:end-size(p,1), :); %Training rows
p = processedData(end-size(p,1)+1:end, :); %Prediction rows
Y = predict(p, xStart, dStart, M, Sigma, lambda, L) %Display predictions
Y = Y.*1000; %Scale up - presumably targets are in $1000s; confirm. Result is not used further.
end
function Y = predict(p, x, d, M, Sigma, lambda, L)
%PREDICT  Train L committee RBF networks on (x,d) and average their
%predictions for the query points p.
%   p      - prediction inputs (rows = samples)
%   x      - training inputs (rows = samples)
%   d      - training targets (column vector)
%   M      - number of RBF centres
%   Sigma  - variance option/value passed to getCentres
%   lambda - regularisation parameter
%   L      - number of committee members
%   Y      - mean prediction over the L committee members
y = zeros(size(p,1), L); %Preallocate committee outputs
for committee = 1:L
    %-----Stage 1 - Parameterise Hidden Layer
    %NOTE(review): getCentres overwrites Sigma, so committees after the first
    %receive the previous numeric Sigma instead of the original option token.
    %RBFGaussian does the same - confirm this is intended.
    [c, Sigma] = getCentres(x, M, Sigma);
    %-----Stage 2 - Find Weights
    Phi = ones(size(x,1),size(c,1)+1); %Column 1 stays 1 as the bias term
    if size(Sigma) > 1 %True only when Sigma is a stack of covariance matrices
        for n = 1:size(x,1)
            for m = 1:size(c,1)
                Phi(n,m+1) = multiGaussianBasis(x(n,:)-c(m,:),Sigma(:,:,m)); %Multivariate gaussian basis function
            end
        end
    else
        for n = 1:size(x,1)
            for m = 1:size(c,1)
                Phi(n,m+1) = uniGaussianBasis(norm(x(n,:)-c(m,:)), Sigma); %Univariate gaussian basis function
            end
        end
    end
    %NOTE(review): standard regularised least squares would use Phi' rather
    %than pinv(Phi) here; kept as-is to match RBFGaussian, whose tuned
    %hyper-parameters depend on this exact formula.
    W = pinv(pinv(Phi) * Phi + lambda * eye(size(c,1)+1)) * pinv(Phi) * d; %Using regularization theory
    %-----Stage 3 - Predict on the query points
    pPhi = ones(size(p,1),size(c,1)+1); %Bias
    if size(Sigma) > 1
        for n = 1:size(p,1)
            for m = 1:size(c,1)
                pPhi(n,m+1) = multiGaussianBasis(p(n,:)-c(m,:),Sigma(:,:,m)); %Multivariate gaussian basis function
            end
        end
    else
        for n = 1:size(p,1) %BUGFIX: was size(vX,1) - vX is undefined in this function
            for m = 1:size(c,1)
                pPhi(n,m+1) = uniGaussianBasis(norm(p(n,:)-c(m,:)), Sigma); %Univariate gaussian basis function
            end
        end
    end
    y(:,committee)= pPhi * W;
end
Y = mean(y,2); %Average the committee predictions
end
function Error = RBFGaussian(xStart, dStart, k, M, Sigma, lambda, L, I)
%RBFGAUSSIAN  k-fold cross-validated error of an L-member committee of
%Gaussian RBF networks.
%   xStart - full input set (rows = samples)
%   dStart - full target set (column vector)
%   k      - number of cross-validation folds
%   M      - number of RBF centres
%   Sigma  - variance option/value passed to getCentres
%   lambda - regularisation parameter
%   L      - number of committee members
%   I      - fixed random permutation of row indices used to form the folds
%   Error  - mean absolute validation error of the averaged committee,
%            averaged over the k folds
N = size(xStart,1);
foldSize = floor(N/k); %Number of data points in each fold
for fold = 1:k
%Split data for fold: training partition and validation partition
[x,vX] = kFold(xStart, fold, foldSize, I);
[d,vD] = kFold(dStart, fold, foldSize, I);
for committee = 1:L
%-----Stage 1 - Parameterise Hidden Layer
%NOTE(review): Sigma is overwritten here, so committees/folds after the
%first receive the previous numeric Sigma rather than the original option
%token - confirm getCentres handles both forms
[c, Sigma] = getCentres(x, M, Sigma);
%-----Stage 2 - Find Weights
Phi = ones(size(x,1),size(c,1)+1); %Column 1 stays 1 as the bias term
if size(Sigma) > 1 %True only when Sigma is a stack of covariance matrices
for n = 1:size(x,1)
for m = 1:size(c,1)
Phi(n,m+1) = multiGaussianBasis(x(n,:)-c(m,:),Sigma(:,:,m)); %Multivariate gaussian basis function
end
end
else
for n = 1:size(x,1)
for m = 1:size(c,1)
Phi(n,m+1) = uniGaussianBasis(norm(x(n,:)-c(m,:)), Sigma); %Univariate gaussian basis function
end
end
end
%NOTE(review): standard regularised least squares would use Phi' rather than
%pinv(Phi); the tuned hyper-parameters were found with this exact formula
W = pinv(pinv(Phi) * Phi + lambda * eye(size(c,1)+1)) * pinv(Phi) * d; %Using regularization theory
%-----Stage 3 - Cross Validate
vPhi = ones(size(vX,1),size(c,1)+1); %Bias
if size(Sigma) > 1
for n = 1:size(vX,1)
for m = 1:size(c,1)
vPhi(n,m+1) = multiGaussianBasis(vX(n,:)-c(m,:),Sigma(:,:,m)); %Multivariate gaussian basis function
end
end
else
for n = 1:size(vX,1)
for m = 1:size(c,1)
vPhi(n,m+1) = uniGaussianBasis(norm(vX(n,:)-c(m,:)), Sigma); %Univariate gaussian basis function
end
end
end
y(:,committee)= vPhi * W; %Per-committee validation predictions
ECom(committee, fold) = mean(abs(y(:,committee)-vD)); %Per-committee MAE
end
Y = mean(y,2); %Committee-averaged prediction
E(fold) = mean(abs(Y-vD)); %Fold MAE of the averaged committee
%Printout - bare expressions deliberately display progress after each fold
clc
ECom
mean(ECom)
E
k
M
lambda
size(c,1)
fold
end
Error = mean(E); %Average MAE over all k folds
end
function x = preProcess(x, option)
%---Input Normalisation
%Linear Rescaling
if option == 1 || option == 3
rbf神经网络(matlab代码,含数据和测试)
2星 需积分: 50 99 浏览量
2017-10-09
10:39:36
上传
评论 15
收藏 17KB ZIP 举报
Keep_Going_HYC
- 粉丝: 18
- 资源: 12
最新资源
- 全卷积网络基于voc2012数据集简单pytorch实现
- pycharm的一些介绍-用于更好的学习python
- 基于C++的程序设计大赛天梯赛L2答案(天梯赛)
- 基于python实现的三次样条插值和均值插值法实现
- Python语言教程2-python批量图片大小处理-多文件夹
- Python语言教程1-python批量图片重命名,将后缀某几个不想要的字去除
- Space Combat Kit 太空战斗套件Unity游戏开发插件资源unitypackage C#
- Universal Device Preview 通用设备预览Unity游戏开发插件资源unitypackage
- Paladin Anim Set 圣骑士动画集Unity游戏动作动画插件资源unitypackage
- 计算机财务管理期末考报表部分题目及答案.doc
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈