function [model,cost,O3] = tunelssvm(model, varargin)
% Tune the hyperparameters of the model with respect to the given performance measure
%
% 1. Using the functional interface:
%
%
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess})
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess}, StartingValues)
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess},...
% StartingValues, optfun, optargs)
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess},...
% StartingValues, optfun, optargs, costfun, costargs)
%
% Outputs
% gam : Optimal regularization parameter
% sig2 : Optimal kernel parameter(s)
% cost(*) : Estimated cost of the optimal hyperparameters
% Inputs
% X : N x d matrix with the inputs of the training data
% Y : N x 1 vector with the outputs of the training data
% type : 'function estimation' ('f') or 'classifier' ('c')
% igam : Starting value of the regularization parameter
% isig2 : Starting value of the kernel parameter(s) (bandwidth in the case of the 'RBF_kernel')
% kernel(*) : Kernel type (by default 'RBF_kernel')
% preprocess(*) : 'preprocess'(*) or 'original'
% StartingValues(*) : Starting values of the optimization routine (or '[]')
% optfun(*) : Optimization function (by default 'gridsearch')
% optargs(*) : Cell with extra optimization function arguments
% costfun(*) : Function estimating the cost-criterion (by default 'crossvalidate')
% costargs(*) : Cell with extra cost function arguments
%
% 2. Using the object oriented interface:
%
% >> [model, cost] = tunelssvm(model)
% >> [model, cost] = tunelssvm(model, StartingValues)
% >> [model, cost] = tunelssvm(model, StartingValues, optfun, optargs)
% >> [model, cost] = tunelssvm(model, StartingValues, optfun, optargs, costfun, costargs)
%
% Outputs
% model : Object oriented representation of the LS-SVM model with optimal hyperparameters
% cost(*) : Estimated cost of the optimal hyperparameters
% Inputs
% model : Object oriented representation of the LS-SVM model with initial hyperparameters
% StartingValues(*): Starting values of the optimization routine (or '[]')
% optfun(*) : Optimization function (by default 'gridsearch')
% optargs(*) : Cell with extra optimization function arguments
% costfun(*) : Function estimating the cost-criterion (by default 'crossvalidate')
% costargs(*) : Cell with extra cost function arguments
%
% See also:
% trainlssvm, crossvalidate, validate, gridsearch, linesearch
% Copyright (c) 2009, KULeuven-ESAT-SCD, License & help @ http://www.esat.kuleuven.ac.be/sista/lssvmlab
%
% initiate variables
%
if iscell(model),
model = initlssvm(model{:});
func=1;
else
func=0;
end
%
% defaults
%
if length(varargin)>=1, optfun = varargin{1}; else optfun='gridsearch';end
if length(varargin)>=2, costfun = varargin{2}; else costfun ='crossvalidatelssvm'; end
if length(varargin)>=3, costargs = varargin{3}; else costargs ={}; end
if strcmp(costfun,'crossvalidatelssvm') || strcmp(costfun,'rcrossvalidatelssvm') || strcmp(costfun,'crossvalidatesparselssvm')
if size(costargs,2)==1, error('Specify the number of folds for CV'); end
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,costargs{2},0);
costargs = {Y,costargs{1},omega,costargs{2}};
end
if strcmp(costfun,'crossvalidate2lp1')
fprintf('\n')
disp('-->> Cross-Validation for Correlated Errors: Determine optimal ''l'' for leave (2l+1) out CV')
% if user specifies 'l'
if numel(costargs)==1, luser = NaN; else luser = costargs{2};end
[l,index] = cvl(model.xtrain,model.ytrain,luser); % First determine the 'l' for the CV
fprintf(['\n -->> Optimal l = ' num2str(l)]);
fprintf('\n')
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,[],1);
costargs = {Y,index,omega,costargs{1}};
end
if strcmp(costfun,'gcrossvalidatelssvm') || strcmp(costfun,'leaveoneoutlssvm')
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,[],0);
costargs = {Y,omega,costargs{1}};
end
if strcmp(costfun,'rcrossvalidatelssvm')
eval('model.weights = varargin{4};','model.weights = ''wmyriad''; ')
end
if strcmp(costfun,'crossvalidatelssvm_SIM')
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,[],1);
costargs = {model.xtrain,Y,costargs{1},omega,costargs{2}};
end
% change the coding type for multiclass and set default 'OneVsOne' if no
% coding type specified
%if length(varargin)>=5 && ~isempty(varargin{5})
if model.type(1) =='c' && ~(sum(unique(model.ytrain))==1 || sum(unique(model.ytrain))==0)
eval('coding = varargin{4};','coding = ''code_OneVsOne''; ')
varargin{5}= coding;
model = changelssvm(model,'codetype',coding);
[yc,cb,oldcb] = code(model.ytrain,coding);
y_dimold = model.y_dim;
model.ytrain = yc; model.y_dim = size(yc,2);
varargin{end} = []; clear yc
end
%
% multiple outputs
if (model.y_dim>1)% & (size(model.kernel_pars,1)==model.y_dim |size(model.gam,1)==model.y_dim |prod(size(model.kernel_type,1))==model.y_dim))
disp('-->> tune individual outputs');
if model.type(1) == 'c'
fprintf('\n')
disp(['-->> Encoding scheme: ',coding]);
end
costs = zeros(model.y_dim,1); gamt = zeros(1,model.y_dim);
for d=1:model.y_dim,
sel = ~isnan(model.ytrain(:,d));
fprintf(['\n\n -> dim ' num2str(d) '/' num2str(model.y_dim) ':\n']);
try kernel = model.kernel_type{d}; catch, kernel=model.kernel_type;end
[g,s,c] = tunelssvm({model.xtrain(sel,:),model.ytrain(sel,d),model.type,[],[],kernel,'original'},varargin{:});
gamt(:,d) = g;
try kernel_part(:,d) = s; catch, kernel_part = [];end
costs(d) = c;
end
model.gam = gamt;
model.kernel_pars = kernel_part;
if func,
O3 = costs;
cost = model.kernel_pars;
model = model.gam;
end
% decode to the original model.yfull
if model.code(1) == 'c', % changed
model.ytrain = code(model.ytrain, oldcb, [], cb, 'codedist_hamming');
model.y_dim = y_dimold;
end
return
end
%-------------------------------------------------------------------------%
if strcmp(model.kernel_type,'lin_kernel'),
if ~strcmp(model.weights,'wmyriad') && ~strcmp(model.weights,'whuber')
[par,fval] = csa(rand(1,5),@(x)simanncostfun1(x,model,costfun,costargs));
else
[par,fval] = csa(rand(2,5),@(x)simanncostfun1(x,model,costfun,costargs));
model.delta = exp(par(2));
end
model = changelssvm(changelssvm(model,'gam',exp(par(1))),'kernel_pars',[]); clear par
fprintf('\n')
disp([' 1. Coupled Simulated Annealing results: [gam] ' num2str(model.gam)]);
disp([' F(X)= ' num2str(fval)]);
disp(' ')
elseif strcmp(model.kernel_type,'RBF_kernel') || strcmp(model.kernel_type,'sinc_kernel') || strcmp(model.kernel_type,'RBF4_kernel')
if ~strcmp(model.weights,'wmyriad') && ~strcmp(model.weights,'whuber')
[par,fval] = csa(rand(2,5),@(x)simanncostfun2(x,model,costfun,costargs));
else
[par,fval] = csa(rand(3,5),@(x)simanncostfun2(x,model,costfun,costargs));
model.delta = exp(par(3));
end
model = changelssvm(changelssvm(model,'gam',exp(par(1))),'kernel_pars',exp(par(2)));
fprintf('\n')
disp([' 1. Coupled Simulated Annealing results: [gam] ' num2str(model.gam)]);
disp(['
没有合适的资源?快使用搜索试试~ 我知道了~
温馨提示
Matlab实现RIME-LSSVM霜冰算法优化最小二乘支持向量机多变量回归预测 1.data为数据集,输入6个特征,输出一个变量。 2.main.m为程序主文件,其余为函数文件无需运行。 3.霜冰算法优化最小二乘支持向量机,优化RBF 核函数gam和sig。 4.注意程序和数据放在一个文件夹,运行环境为Matlab2018及以上. 5.命令窗口输出R2、MSE、MAE、MAPE和MBE多指标评价; 6.程序语言为matlab,程序可出预测效果图,迭代优化图,相关分析图;
资源推荐
资源详情
资源评论
收起资源包目录
RIME-LSSVM回归.zip (95个子文件)
3.png 85KB
1.png 24KB
6.png 42KB
RIME-LSSVM回归
RIME.m 2KB
initialization.m 459B
LSSVMlabv
roc.m 7KB
bay_rr.m 4KB
windowizeNARX.m 2KB
trainlssvm.m 9KB
demo_fixedsize.m 3KB
code_ECOC.m 5KB
weightingscheme.m 928B
codelssvm.m 4KB
code_MOC.m 548B
kernel_matrix2.m 795B
simplex.m 10KB
kpca.m 6KB
lssvmMATLAB.m 2KB
lssvm.m 2KB
csa.m 3KB
predict.m 3KB
bay_lssvmARD.m 8KB
demofun.m 4KB
initlssvm.m 3KB
rsimplex.m 10KB
predlssvm.m 5KB
lin_kernel.m 529B
eign.m 4KB
leaveoneoutlssvm.m 2KB
ridgeregress.m 1KB
preimage_rbf.m 4KB
range.m 173B
progress.m 1KB
gcrossvalidate.m 3KB
leaveoneout.m 4KB
codedist_loss.m 2KB
smootherlssvm.m 1KB
postlssvm.m 5KB
rcrossvalidatelssvm.m 4KB
mae.m 281B
demomulticlass.m 2KB
demo_yinyang.m 3KB
rcrossvalidate.m 6KB
gridsearch.m 7KB
codedist_bay.m 2KB
windowize.m 2KB
code_OneVsAll.m 361B
prelssvm.m 6KB
kernel_matrix.m 3KB
crossvalidatelssvm.m 4KB
democonfint.m 2KB
gcrossvalidatelssvm.m 2KB
tunelssvm.m 23KB
kentropy.m 2KB
bay_optimize.m 6KB
crossvalidate.m 6KB
democlass.m 3KB
cilssvm.m 5KB
trimmedmse.m 2KB
changelssvm.m 5KB
linf.m 310B
latticeseq_b2.m 6KB
robustlssvm.m 2KB
misclass.m 684B
bitreverse32.m 1KB
MLP_kernel.m 603B
demo_fixedclass.m 2KB
bay_modoutClass.m 9KB
code_OneVsOne.m 576B
simann.m 5KB
poly_kernel.m 623B
linesearch.m 4KB
bay_initlssvm.m 2KB
bay_errorbar.m 6KB
plotlssvm.m 10KB
demomodel.m 5KB
RBF_kernel.m 1KB
latentlssvm.m 2KB
tbform.m 3KB
bay_lssvm.m 10KB
codedist_hamming.m 753B
denoise_kpca.m 4KB
medae.m 302B
simlssvm.m 6KB
AFEm.m 3KB
code.m 4KB
main.m 6KB
CPO.m 3KB
fitnessfunclssvm.m 509B
data.xlsx 36KB
5.png 27KB
4.png 58KB
8.png 44KB
7.png 36KB
2.png 33KB
共 95 条
- 1
资源评论
前程算法屋
- 粉丝: 5474
- 资源: 782
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
最新资源
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功