function [history, stopReason,k] = lars(yin, xin, XTX, type, stopCriterion, regularization, trace, quiet)
% %{
% LARS - Least Angle Regression and its lasso modification, as introduced
% by Efron et al. 2003. Read the paper to understand the code in this
% function. Lines of this file carry the corresponding equation numbers
% from Efron et al. 2003 for the reader's convenience.
%
% Outputs:
%   history    - struct array; one entry per iteration with fields such as
%                active_set, beta, b, mu, MSE, R_square (see initialization
%                below for the full field list of the first entry).
%   stopReason - 2-element cell: {reason string, associated value}.
%   k          - iteration index reached when the algorithm stopped.
%
% *** CAUTION
% history(1).mu_OLS contains the original 'yin' to provide convenience in
% writing a user-defined stop criterion function. The actual mu_OLS of the
% first step would be just the mean of yin, i.e. a simple array of copies
% of history(1).mu. So if history(1).mu_OLS contained that information it
% would be redundant. In fact, this is also contained in history(1).b,
% which is the bias of the output. Therefore, to provide more information
% to a user who wants to write his/her own stop criterion function,
% history(1).mu_OLS contains yin.
%
%
% Example 1: moderate size x.
%   stopCriterion = {};
%   stopCriterion{1,1} = 'maxKernels';
%   stopCriterion{1,2} = 100;
%
%   XTX = lars_getXTX(x_original); % this takes a long time.
%   sol = lars(y, x, XTX, 'lasso', stopCriterion);
%
% Example 2: very small size x, or a really really big size x
%   stopCriterion = {};
%   stopCriterion{1,1} = 'maxKernels';
%   stopCriterion{1,2} = 100;
%
%   sol = lars(y, x, XTX, 'lasso', stopCriterion);
%
% Note:
% Users can add any kind of stop criterion by editing
% the corresponding portion of this file. See the code
% for existing examples.
%
% Note 2:
% This m-file does not implement a routine for missing data.
% %}
%
global USING_CLUSTER;
% global RESOLUTION_OF_LARS;
global REGULARIZATION_FACTOR;
% NOTE(review): the global declaration for RESOLUTION_OF_LARS is commented
% out above, so the next assignment creates a local variable only -- any
% other file reading the global will not see this value; confirm intent.
RESOLUTION_OF_LARS=0.0001;
lars_init(); % external helper -- presumably initializes the globals above; TODO confirm
regularization_factor = REGULARIZATION_FACTOR; % Tikhonov regularization factor (or the ridge regression factor)
% -> This should be small enough in this case to get
% a reasonable pseudoinverse.
stopReason = {};
%%% Check parameters
% Empty input/output: return an empty history with a 'Parameter error' reason.
if isempty(yin) || isempty(xin)
warning('\nInput or Output has zero length.\n');
history.active_set = [];
stopReason{1} = 'Parameter error';
stopReason{2} = 0;
return;
end
% y and x must have the same number of rows (samples).
if size(yin,1) ~= size(xin,1)
warning('\nSize of y does not match to that of x.\n');
history.active_set = [];
stopReason{1} = 'Parameter error';
stopReason{2} = 0;
return;
end
% NOTE(review): element-wise & is used on scalar logicals here; result is
% the same as short-circuit &&, but && is the idiomatic choice in MATLAB.
if ~strcmp(type, 'lasso') & ~strcmp(type, 'lars') & ~strcmp(type, 'forward_stepwise')
warning('\nUnknown type of regression.\n');
history.active_set = [];
stopReason{1} = 'Parameter error';
stopReason{2} = 0;
return;
end
if strcmp(type, 'forward_stepwise')
warning('\nForward_stepwise is not implemented.\n');
history.active_set = [];
stopReason{1} = 'Parameter error';
stopReason{2} = 0;
return;
end
% NOTE(review): any non-empty 'regularization' argument is overridden to a
% hard-coded 10 -- the caller's actual value is discarded. Confirm this is
% intentional and not a leftover debug assignment.
if exist('regularization','var') && ~isempty(regularization)
regularization = 10;
else
regularization = 0;
end
% Default: no trace output.
if ~exist('trace','var') || isempty(trace)
trace=0;
end
% Default: not quiet; quiet==1 also suppresses trace output.
if ~exist('quiet','var') || isempty(quiet)
quiet=0;
elseif quiet==1
trace=0;
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Data preparation
% Program automatically centers and standardizes predictors.
if ~exist('XTX','var')
XTX=[];
end
% no_xtx==1 means x'*x is too large to pre-compute and is skipped.
no_xtx = 0;
if ~isempty(XTX)
if ~quiet && trace >=0
fprintf('\nLars is using the provided xtx.\n');
end
elseif size(xin,2)^2 > 10^6
if ~quiet && trace >=0
fprintf('Too large matrix (size(x,2)^2 > 10^6). lars will not pre-calculate xtx.\n');
end
no_xtx = 1;
XTX = lars_getXTX(xin,no_xtx);
else
% fprintf('\nCalculating xtx.\n');
XTX = lars_getXTX(xin);
end
% Unpack the (externally computed) XTX struct -- see lars_getXTX.
x = XTX.x; % normalized xin
mx = XTX.mx; % mean xin
sx = XTX.sx; % length of each column of xin
ignores = XTX.ignores; % indices for constant terms
all_candidate = XTX.all_candidate;% indices for all possible columns
if ~no_xtx
xtx = XTX.xtx; % xtx matrix
dup_columns = XTX.dup_columns; % duplicated columns which will be automatically ignored.
end
% Center the response; the intercept is restored via my in the history.
my = mean(yin);
y = yin-my;
n = size(x,1); % # of samples
m = size(x,2); % # of predictors
% Now, we can determine the maximum number of kernels.
%maxKernels = min(maxKernels, min(size(xin,1)-1, length(all_candidate)));
%maxKernels = min(maxKernels, min(rank(xin), length(all_candidate)));
% Scan the user-supplied stop criteria; clamp 'maxKernels' and 'MSE' to
% feasible values, and remember whether each was supplied at all.
existMaxKernels = 0;
existMSE = 0;
for is = 1:size(stopCriterion,1)
if strcmp(stopCriterion{is,1},'maxKernels')
existMaxKernels = 1;
stopCriterion{is,2} = min(stopCriterion{is,2}, min(size(xin,1)-1, length(all_candidate)));
% stopCriterion{is,2} = min(stopCriterion{is,2}, min(rank(xin), length(all_candidate)));
if stopCriterion{is,2}<1
warning('Max Kernel is less than 1. It must be larger than 0.\n');
stopCriterion{is,2} = 1;
end
end
if strcmp(stopCriterion{is,1},'MSE')
existMSE = 1;
if stopCriterion{is,2}<1.0e-10
warning('Maximum MSE is too small. Automatically set to 1.0e-10\n');
stopCriterion{is,2} = 1.0e-10;
end
end
end
% Append default criteria for any that the caller did not supply.
if ~existMaxKernels
is = size(stopCriterion,1);
stopCriterion{is+1,1} = 'maxKernels';
% stopCriterion{is+1,2} = min(size(xin,1)-1, length(all_candidate)); % Stop when size of active set is data.maxKernels.
stopCriterion{is+1,2} = min(rank(xin), length(all_candidate)); % Stop when size of active set is data.maxKernels.
end
if ~existMSE
is = size(stopCriterion,1);
stopCriterion{is+1,1} = 'MSE';
stopCriterion{is+1,2} = 1.0e-10;
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Initialization
active = []; % active set
inactive = all_candidate; % inactive set
mu_a = zeros(n,1); % current estimate (eq. 2.8)
mu_a_plus = 0; % next estimate (eq. 2.12)
mu_a_OLS = 0; % OLS estimate (eq. 2.19)
beta = zeros(1,size(x,2));
beta_new = beta;
beta_OLS = beta;
% history(1): the null model (intercept only); see CAUTION above for mu_OLS.
history.active_set = [];
history.add = [];
history.drop = [];
history.beta_norm = [];
history.beta = [];
history.b = my;
history.mu = my;
history.beta_OLS_norm = [];
history.beta_OLS = [];
history.b_OLS = my;
history.mu_OLS = my*ones(size(yin));
history.MSE = sum(y.^2)/length(y);
history.R_square = 0;
history.resolution_warning = [];
% A constant response cannot be regressed on; bail out early.
if var(yin)==0
stopReason{1} = 'zeroVarY';
stopReason{2} = var(yin);
return;
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%% Main loop
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
c = 0; % correlation vector
C_max = max(abs(c));
C_max_ind = [];
C_max_ind_pl = [];
drop = []; % used for 'lasso'
k = 1; % iteration index
%if ~quiet && trace >= 0
% fprintf('Active predictors / total ::::: Current iteration\n ');
%end
while 1
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%% Exit Criteria
% NOTE(review): this copy of the file is TRUNCATED here -- the body of the
% main loop (exit-criteria checks and the LARS/lasso update steps, eqs.
% 2.8-2.21 of Efron et al.) is missing and must be restored from a
% complete copy of lars.m before this function can run.
%%%%%%%%%%%%%%%%%%
没有合适的资源?快使用搜索试试~ 我知道了~
套索识别算法
共1个文件
m:1个
3星 · 超过75%的资源 需积分: 23 25 下载量 197 浏览量
2013-10-07
12:54:22
上传
评论 1
收藏 5KB RAR 举报
温馨提示
这是套索识别算法 matlab程序,可用于分类识别,里面只包含一个Lars.m文件
资源推荐
资源详情
资源评论
收起资源包目录
lars.rar (1个子文件)
lars.m 18KB
共 1 条
- 1
资源评论
- Littleseagull2013-10-22看着有点难的样子,应该不错,挺好的
- jsjjsyc2014-03-01代码不全,只有一个主函数,里面用到的辅助函数都没放进来
cici_lily
- 粉丝: 0
- 资源: 1
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
最新资源
- python tkinter-08-盒子模型.ev4.rar
- Doozy UI Manager 2023
- 基于matlab实现夜间车牌识别程序(1).rar
- 基于matlab实现无线传感器网络无需测距定位算法matlab源代码 包括apit,dv-hop,amorphous在内的共7个
- 基于python的yolov5实现的旋转目标检测
- 基于matlab实现无线传感器网络 CAB定位仿真程序 这是无线传感器节点定位CAB算法的仿真程序,由matlab完成.rar
- 基于matlab实现图像处理,本程序使用背景差分法对来往车辆进行检测和跟踪.rar
- 基于matlab实现视频监控中车型识别代码,自己写的,希望和大家多多交流.rar
- springcodespringcodespringcodespringcode
- 基于matlab实现权值的MAXDEV无线传感器网络定位算法研究 MAXDEV 无线传感器 定位 算法.rar
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功