function [model,cost,O3] = tunelssvm(model, varargin)
% Tune the hyperparameters of the model with respect to the given performance measure
%
% 1. Using the functional interface:
%
%
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess})
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess}, StartingValues)
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess},...
% StartingValues, optfun, optargs)
% >> [gam, sig2, cost] = tunelssvm({X,Y,type,igam,isig2,kernel,preprocess},...
% StartingValues, optfun, optargs, costfun, costargs)
%
% Outputs
% gam : Optimal regularization parameter
% sig2 : Optimal kernel parameter(s)
% cost(*) : Estimated cost of the optimal hyperparameters
% Inputs
% X : N x d matrix with the inputs of the training data
% Y : N x 1 vector with the outputs of the training data
% type : 'function estimation' ('f') or 'classifier' ('c')
% igam : Starting value of the regularization parameter
% isig2 : Starting value of the kernel parameter(s) (bandwidth in the case of the 'RBF_kernel')
% kernel(*) : Kernel type (by default 'RBF_kernel')
% preprocess(*) : 'preprocess'(*) or 'original'
% StartingValues(*) : Starting values of the optimization routine (or '[]')
% optfun(*) : Optimization function (by default 'gridsearch')
% optargs(*) : Cell with extra optimization function arguments
% costfun(*) : Function estimating the cost-criterion (by default 'crossvalidate')
% costargs(*) : Cell with extra cost function arguments
%
% 2. Using the object oriented interface:
%
% >> [model, cost] = tunelssvm(model)
% >> [model, cost] = tunelssvm(model, StartingValues)
% >> [model, cost] = tunelssvm(model, StartingValues, optfun, optargs)
% >> [model, cost] = tunelssvm(model, StartingValues, optfun, optargs, costfun, costargs)
%
% Outputs
% model : Object oriented representation of the LS-SVM model with optimal hyperparameters
% cost(*) : Estimated cost of the optimal hyperparameters
% Inputs
% model : Object oriented representation of the LS-SVM model with initial hyperparameters
% StartingValues(*): Starting values of the optimization routine (or '[]')
% optfun(*) : Optimization function (by default 'gridsearch')
% optargs(*) : Cell with extra optimization function arguments
% costfun(*) : Function estimating the cost-criterion (by default 'crossvalidate')
% costargs(*) : Cell with extra cost function arguments
%
% See also:
% trainlssvm, crossvalidate, validate, gridsearch, linesearch
% Copyright (c) 2009, KULeuven-ESAT-SCD, License & help @ http://www.esat.kuleuven.ac.be/sista/lssvmlab
%
% initiate variables
%
if iscell(model),
model = initlssvm(model{:});
func=1;
else
func=0;
end
%
% defaults
%
if length(varargin)>=1, optfun = varargin{1}; else optfun='gridsearch';end
if length(varargin)>=2, costfun = varargin{2}; else costfun ='crossvalidatelssvm'; end
if length(varargin)>=3, costargs = varargin{3}; else costargs ={}; end
if strcmp(costfun,'crossvalidatelssvm') || strcmp(costfun,'rcrossvalidatelssvm') || strcmp(costfun,'crossvalidatesparselssvm')
if size(costargs,2)==1, error('Specify the number of folds for CV'); end
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,costargs{2},0);
costargs = {Y,costargs{1},omega,costargs{2}};
end
if strcmp(costfun,'crossvalidate2lp1')
fprintf('\n')
disp('-->> Cross-Validation for Correlated Errors: Determine optimal ''l'' for leave (2l+1) out CV')
% if user specifies 'l'
if numel(costargs)==1, luser = NaN; else luser = costargs{2};end
[l,index] = cvl(model.xtrain,model.ytrain,luser); % First determine the 'l' for the CV
fprintf(['\n -->> Optimal l = ' num2str(l)]);
fprintf('\n')
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,[],1);
costargs = {Y,index,omega,costargs{1}};
end
if strcmp(costfun,'gcrossvalidatelssvm') || strcmp(costfun,'leaveoneoutlssvm')
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,[],0);
costargs = {Y,omega,costargs{1}};
end
if strcmp(costfun,'rcrossvalidatelssvm')
eval('model.weights = varargin{4};','model.weights = ''wmyriad''; ')
end
if strcmp(costfun,'crossvalidatelssvm_SIM')
[Y,omega] = helpkernel(model.xtrain,model.ytrain,model.kernel_type,[],1);
costargs = {model.xtrain,Y,costargs{1},omega,costargs{2}};
end
% change the coding type for multiclass and set default 'OneVsOne' if no
% coding type specified
%if length(varargin)>=5 && ~isempty(varargin{5})
if model.type(1) =='c' && ~(sum(unique(model.ytrain))==1 || sum(unique(model.ytrain))==0)
eval('coding = varargin{4};','coding = ''code_OneVsOne''; ')
varargin{5}= coding;
model = changelssvm(model,'codetype',coding);
[yc,cb,oldcb] = code(model.ytrain,coding);
y_dimold = model.y_dim;
model.ytrain = yc; model.y_dim = size(yc,2);
varargin{end} = []; clear yc
end
%
% multiple outputs
if (model.y_dim>1)% & (size(model.kernel_pars,1)==model.y_dim |size(model.gam,1)==model.y_dim |prod(size(model.kernel_type,1))==model.y_dim))
disp('-->> tune individual outputs');
if model.type(1) == 'c'
fprintf('\n')
disp(['-->> Encoding scheme: ',coding]);
end
costs = zeros(model.y_dim,1); gamt = zeros(1,model.y_dim);
for d=1:model.y_dim,
sel = ~isnan(model.ytrain(:,d));
fprintf(['\n\n -> dim ' num2str(d) '/' num2str(model.y_dim) ':\n']);
try kernel = model.kernel_type{d}; catch, kernel=model.kernel_type;end
[g,s,c] = tunelssvm({model.xtrain(sel,:),model.ytrain(sel,d),model.type,[],[],kernel,'original'},varargin{:});
gamt(:,d) = g;
try kernel_part(:,d) = s; catch, kernel_part = [];end
costs(d) = c;
end
model.gam = gamt;
model.kernel_pars = kernel_part;
if func,
O3 = costs;
cost = model.kernel_pars;
model = model.gam;
end
% decode to the original model.yfull
if model.code(1) == 'c', % changed
model.ytrain = code(model.ytrain, oldcb, [], cb, 'codedist_hamming');
model.y_dim = y_dimold;
end
return
end
%-------------------------------------------------------------------------%
if strcmp(model.kernel_type,'lin_kernel'),
if ~strcmp(model.weights,'wmyriad') && ~strcmp(model.weights,'whuber')
[par,fval] = csa(rand(1,5),@(x)simanncostfun1(x,model,costfun,costargs));
else
[par,fval] = csa(rand(2,5),@(x)simanncostfun1(x,model,costfun,costargs));
model.delta = exp(par(2));
end
model = changelssvm(changelssvm(model,'gam',exp(par(1))),'kernel_pars',[]); clear par
fprintf('\n')
disp([' 1. Coupled Simulated Annealing results: [gam] ' num2str(model.gam)]);
disp([' F(X)= ' num2str(fval)]);
disp(' ')
elseif strcmp(model.kernel_type,'RBF_kernel') || strcmp(model.kernel_type,'sinc_kernel') || strcmp(model.kernel_type,'RBF4_kernel')
if ~strcmp(model.weights,'wmyriad') && ~strcmp(model.weights,'whuber')
[par,fval] = csa(rand(2,5),@(x)simanncostfun2(x,model,costfun,costargs));
else
[par,fval] = csa(rand(3,5),@(x)simanncostfun2(x,model,costfun,costargs));
model.delta = exp(par(3));
end
model = changelssvm(changelssvm(model,'gam',exp(par(1))),'kernel_pars',exp(par(2)));
fprintf('\n')
disp([' 1. Coupled Simulated Annealing results: [gam] ' num2str(model.gam)]);
disp(['
没有合适的资源?快使用搜索试试~ 我知道了~
温馨提示
MATLAB实现LSSVM(最小二乘支持向量机)二分类预测(完整源码和数据) MATLAB实现LSSVM(最小二乘支持向量机)二分类预测(完整源码和数据) MATLAB实现LSSVM(最小二乘支持向量机)二分类预测(完整源码和数据) MATLAB实现LSSVM(最小二乘支持向量机)二分类预测(完整源码和数据)
资源推荐
资源详情
资源评论
收起资源包目录
分类预测:LSSVM.zip (83个子文件)
LSSVMC
LSSVMlabv
roc.m 7KB
bay_rr.m 4KB
windowizeNARX.m 2KB
trainlssvm.m 9KB
demo_fixedsize.m 3KB
code_ECOC.m 5KB
weightingscheme.m 928B
codelssvm.m 4KB
code_MOC.m 548B
kernel_matrix2.m 795B
simplex.m 10KB
kpca.m 6KB
lssvmMATLAB.m 2KB
lssvm.m 2KB
csa.m 3KB
predict.m 3KB
bay_lssvmARD.m 8KB
demofun.m 4KB
initlssvm.m 3KB
rsimplex.m 10KB
predlssvm.m 5KB
lin_kernel.m 529B
eign.m 4KB
leaveoneoutlssvm.m 2KB
ridgeregress.m 1KB
preimage_rbf.m 4KB
range.m 173B
progress.m 1KB
gcrossvalidate.m 3KB
leaveoneout.m 4KB
codedist_loss.m 2KB
smootherlssvm.m 1KB
postlssvm.m 5KB
rcrossvalidatelssvm.m 4KB
mae.m 281B
demomulticlass.m 2KB
demo_yinyang.m 3KB
rcrossvalidate.m 6KB
gridsearch.m 7KB
codedist_bay.m 2KB
windowize.m 2KB
code_OneVsAll.m 361B
prelssvm.m 6KB
kernel_matrix.m 3KB
crossvalidatelssvm.m 4KB
democonfint.m 2KB
gcrossvalidatelssvm.m 2KB
tunelssvm.m 23KB
kentropy.m 2KB
bay_optimize.m 6KB
crossvalidate.m 6KB
democlass.m 3KB
cilssvm.m 5KB
trimmedmse.m 2KB
changelssvm.m 5KB
linf.m 310B
latticeseq_b2.m 6KB
robustlssvm.m 2KB
misclass.m 684B
bitreverse32.m 1KB
MLP_kernel.m 603B
demo_fixedclass.m 2KB
bay_modoutClass.m 9KB
code_OneVsOne.m 576B
simann.m 5KB
poly_kernel.m 623B
linesearch.m 4KB
bay_initlssvm.m 2KB
bay_errorbar.m 6KB
plotlssvm.m 10KB
demomodel.m 5KB
RBF_kernel.m 1KB
latentlssvm.m 2KB
tbform.m 3KB
bay_lssvm.m 10KB
codedist_hamming.m 753B
denoise_kpca.m 4KB
medae.m 302B
simlssvm.m 6KB
AFEm.m 3KB
code.m 4KB
TestLSSVM.m 1KB
LSSVMC.zip 118KB
共 83 条
- 1
资源评论
- 2301_792468392023-10-30资源简直太好了,完美解决了当下遇到的难题,这样的资源很难不支持~
- 2301_768884392024-01-01非常有用的资源,有一定的参考价值,受益匪浅,值得下载。
机器学习之心
- 粉丝: 2w+
- 资源: 1026
下载权益
C知道特权
VIP文章
课程特权
开通VIP
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
最新资源
- Analytics-2024-11-07-080019.ips.ca.synced
- java订单管理系统源码ssh框架数据库 MySQL源码类型 WebForm
- DS18B20温度传感器STM32驱动头文件
- (源码)基于Spring Boot的二猫社区管理系统.zip
- content_1731501062882.xlsx
- javaswing游戏项目源码.zip
- Python实现收集器的源码.zip
- (源码)基于Java多线程的并发任务管理系统.zip
- C#在线网校考试系统源码 在线学习云服务平台源码数据库 SQL2008源码类型 WebForm
- com.sun.net.httpserver安卓HTTP服务器jar包最新版2.2.1
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功