function [model, b, X, Y] = trainlssvm(model, X, Y)
%% Train the LS-SVM model
% >> model = trainlssvm(model)
% >> model = trainlssvm({X, Y, type, gam, kernel_par, kernel, preprocess})
%
% type can be 'classifier' or 'function estimation' (these strings
% can be abbreviated to 'c' or 'f', respectively). X and Y are
% matrices holding the training input and output data; the i-th
% data point is represented by the i-th rows X(i,:) and Y(i,:). gam
% is the regularization parameter: a low gam emphasizes minimizing
% the complexity of the model, while a high gam stresses good
% fitting of the training data points. kernel_par is the parameter
% of the kernel; in the common case of an RBF kernel, a large sig2
% indicates stronger smoothing. kernel_type indicates the function
% that is called to compute the kernel value (by default
% RBF_kernel). Other kernels ('lin_kernel', 'poly_kernel',
% 'MLP_kernel') can be used by passing their name.
%
% The kernel parameter(s) are passed as a row vector; in case no
% kernel parameter is needed, pass the empty vector.
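%
% For instance (a minimal sketch; X, Y, type and gam are placeholder
% variables, not values defined in this file):
%
% >> model = trainlssvm({X, Y, type, gam, sig2, 'RBF_kernel'})  % one kernel parameter
% >> model = trainlssvm({X, Y, type, gam, [],   'lin_kernel'})  % no kernel parameter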
%
% The training can either be preceded by the preprocessing
% function ('preprocess', the default) or not ('original'). The
% training calls the preprocessing (prelssvm, postlssvm) and the
% encoder (codelssvm) if appropriate.
%
% In the remainder of the text, the content of the cell determining
% the LS-SVM is given by {X, Y, type, gam, sig2}. The additional
% arguments (kernel, preprocess) can always be appended to this
% cell in the calls.
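%
% A minimal usage sketch of this functional interface (the data,
% gam and sig2 below are illustrative placeholders):
%
% >> X     = (-3:0.1:3)';
% >> Y     = sin(2*X) + 0.1*randn(size(X));
% >> gam   = 10; sig2 = 0.3;
% >> model = trainlssvm({X, Y, 'function estimation', gam, sig2, 'RBF_kernel'});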
%
% If one uses the object oriented interface (see also A.3.14), the training is done by
%
% >> model = trainlssvm(model)
% >> model = trainlssvm(model, X, Y)
%
% The status of the model is checked to decide whether retraining
% is needed. The extra arguments X, Y allow re-initializing the
% model with new training data, as long as its dimensions are the
% same as in the old initialization.
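%
% A minimal sketch of this interface (the initlssvm arguments are
% placeholders):
%
% >> model = initlssvm(X, Y, 'function estimation', gam, sig2, 'RBF_kernel');
% >> model = trainlssvm(model);              % first training run
% >> model = trainlssvm(model, Xnew, Ynew);  % retrain on new data of the same dimensions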
%
% The training implementation:
%
% * The MATLAB implementation: a straightforward implementation
% based on the matrix left division '\' (lssvmMATLAB.m), as
% sketched below.
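%
% A sketch of the linear system solved with '\' (the standard
% LS-SVM dual system for function estimation; Omega is assumed to
% come from the LS-SVMlab helper kernel_matrix, and Xtr, Ytr, gam,
% sig2 are placeholders):
%
% >> N     = size(Xtr, 1);
% >> Omega = kernel_matrix(Xtr, 'RBF_kernel', sig2);           % N x N kernel matrix
% >> A     = [0, ones(1, N); ones(N, 1), Omega + eye(N)/gam];  % bordered system matrix
% >> sol   = A \ [0; Ytr];                                     % solve for [b; alpha]
% >> b     = sol(1); alpha = sol(2:end);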
%
%
% This implementation allows training a problem with a
% multidimensional output. If each output uses the same kernel
% type, kernel parameters and regularization parameter, this is
% straightforward. If not, one can specify the different types
% and/or parameters as a row vector in the appropriate argument;
% each output dimension will then be trained with the corresponding
% column of this vector.
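%
% For example, a sketch with two output columns in Y2, each trained
% with its own regularization parameter and RBF bandwidth (values
% are placeholders):
%
% >> gam   = [10, 100];
% >> sig2  = [0.2, 0.5];
% >> model = trainlssvm({X, Y2, 'function estimation', gam, sig2, 'RBF_kernel'});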
% Full syntax:
%
% >> model = trainlssvm(model)
%
% Outputs
%   model         : Trained object oriented representation of the LS-SVM model
% Inputs
%   model         : Object oriented representation of the LS-SVM model
%   X(*)          : N x d matrix with the inputs of the training data
%   Y(*)          : N x 1 vector with the outputs of the training data
%   type(*)       : 'function estimation' ('f') or 'classifier' ('c')
%   gam(*)        : Regularization parameter
%   sig2(*)       : Kernel parameter (bandwidth in the case of the 'RBF_kernel')
%   kernel(*)     : Kernel type (by default 'RBF_kernel')
%   preprocess(*) : 'preprocess'(*) or 'original'
%% Initialize the model (functional interface: cell input)
if iscell(model)
model = initlssvm(model{:});
end
%% Check the training status of the model
if model.status(1) == 't'
if (nargout > 1)
X = model.xtrain;
Y = model.ytrain;
b = model.b;
model = model.alpha;
end
return
end
%% Validate the inputs
if ~((strcmp(model.kernel_type, 'RBF_kernel') && length(model.kernel_pars) >= 1) ||...
        (strcmp(model.kernel_type, 'lin_kernel') && length(model.kernel_pars) >= 0) ||...
        (strcmp(model.kernel_type, 'MLP_kernel') && length(model.kernel_pars) >= 2) ||...
        (strcmp(model.kernel_type, 'poly_kernel') && length(model.kernel_pars) >= 1))
    % the kernel type is not one of the built-in kernels listed above;
    % a user-defined kernel function is assumed, so no further check is done here
elseif (model.steps <= 0)
    error('steps must be larger than 0');
elseif (model.gam <= 0)
    error('gamma must be larger than 0');
elseif or(model.x_dim <= 0, model.y_dim <= 0)
    error('dimension of datapoints must be larger than 0');
end
%% Encoding (only needed for classification)
if model.code(1) == 'c'
model = codelssvm(model);
end
%% Preprocessing (applied only when the preprocessing status has changed)
% changed is 1 when model.prestatus(1) == 'c' ('changed'); the try/catch
% covers the case where the prestatus field does not exist yet
try
if model.prestatus(1)=='c'
changed = 1;
else
changed = 0;
end
catch
changed = 0;
end
if model.preprocess(1) == 'p' && changed
model = prelssvm(model);
elseif model.preprocess(1) == 'o' && changed
model = postlssvm(model);
end
%% Start timing the training
tic;
%% Select the implementation
if size(model.gam, 1) > 1
model.implementation = 'MATLAB';
end
%% Recursive call per output dimension (multidimensional output)
if model.y_dim > 1
    if (length(model.kernel_pars) == model.y_dim || size(model.gam, 2) == model.y_dim ...
            || numel(model.kernel_type) == model.y_dim)
model = trainmultidimoutput(model);
if (nargout > 1)
X = model.xtrain;
Y = model.ytrain;
b = model.b;
model = model.alpha;
else
model.duration = toc;
model.status = 'trained';
end
return
end
end
%% Train the model (MATLAB implementation)
model = lssvmMATLAB(model);
if (nargout > 1)
X = model.xtrain;
Y = model.ytrain;
b = model.b;
model = model.alpha;
else
model.duration = toc;
model.status = 'trained';
end
%% Train a model with multidimensional output
function model = trainmultidimoutput(model)
%% Initialize the weights (alpha and b)
model.alpha = zeros(model.nb_data, model.y_dim);
model.b = zeros(1, model.y_dim);
for d = 1 : model.y_dim
    % use the d-th column of the (hyper)parameters when a value per output
    % dimension was given; otherwise fall back to the shared value
    try
        gam = model.gam(:, d);
    catch
        gam = model.gam(:);
    end
    try
        sig2 = model.kernel_pars(:, d);
    catch
        sig2 = model.kernel_pars(:);
    end
    try
        kernel = model.kernel_type{d};
    catch
        kernel = model.kernel_type;
    end
[model.alpha(:, d), model.b(d)] = trainlssvm({model.xtrain, model.ytrain(:, d), ...
model.type, gam, sig2, kernel, 'original'});
end
%% Outputs
% this subfunction has a single output argument, so only the trained model is returned
model.duration = toc;
model.status = 'trained';