function [eigvector, eigvalue] = LDA(gnd,options,data)
% LDA: Linear Discriminant Analysis
%
% [eigvector, eigvalue] = LDA(gnd, options, data)
%
% Input:
% data - Data matrix. Each row vector of data is a data point.
% gnd - Column vector of the label information for each
% data point.
% options - Struct value in Matlab. The fields in options
% that can be set:
%
% Regu - 1: regularized solution,
% a* = argmax (a'X'WXa)/(a'X'Xa+ReguAlpha*I)
% 0: solve the singularity problem by SVD
% Default: 0
%
% ReguAlpha - The regularization parameter. Valid
% when Regu==1. Default value is 0.1.
%
% ReguType - 'Ridge': Tikhonov regularization
% 'Custom': User provided
% regularization matrix
% Default: 'Ridge'
% regularizerR - (nFea x nFea) regularization
% matrix which should be provided
% if ReguType is 'Custom'. nFea is
% the feature number of data
% matrix
% Fisherface - 1: Fisherface approach
% PCARatio = nSmp - nClass
% Default: 0
%
% PCARatio - The percentage of principal
% component kept in the PCA
% step. The percentage is
% calculated based on the
% eigenvalue. Default is 1
% (100%, all the non-zero
% eigenvalues will be kept).
% If PCARatio > 1, the PCA step
% will keep exactly PCARatio principal
% components (does not exceed the
% exact number of non-zero components).
%
%
% Output:
% eigvector - Each column is an embedding function, for a new
% data point (row vector) x, y = x*eigvector
% will be the embedding result of x.
% eigvalue - The sorted eigenvalue of the LDA eigen-problem.
%
% Examples:
%
% fea = rand(50,70);
% gnd = [ones(10,1);ones(15,1)*2;ones(10,1)*3;ones(15,1)*4];
% options = [];
% options.Fisherface = 1;
% [eigvector, eigvalue] = LDA(gnd, options, fea);
% Y = fea*eigvector;
%
%
% See also LPP, constructW, LGE
%
%
%
%Reference:
%
% Deng Cai, Xiaofei He, Jiawei Han, "SRDA: An Efficient Algorithm for
% Large Scale Discriminant Analysis", IEEE Transactions on Knowledge and
% Data Engineering, 2007.
%
% Deng Cai, "Spectral Regression: A Regression Framework for
% Efficient Regularized Subspace Learning", PhD Thesis, Department of
% Computer Science, UIUC, 2009.
%
% version 3.0 --Dec/2011
% version 2.1 --June/2007
% version 2.0 --May/2007
% version 1.1 --Feb/2006
% version 1.0 --April/2004
%
% Written by Deng Cai (dengcai AT gmail.com)
%

% Above these sizes eigs() (iterative) is preferred over eig() (dense).
MAX_MATRIX_SIZE = 1600; % You can change this number according your machine computational power
EIGVECTOR_RATIO = 0.1; % You can change this number according your machine computational power

if (~exist('options','var'))
    options = [];
end

% Decide between the SVD route (bPCA) and the regularized route.
if ~isfield(options,'Regu') || ~options.Regu
    bPCA = 1;
    if ~isfield(options,'PCARatio')
        options.PCARatio = 1;
    end
else
    bPCA = 0;
    if ~isfield(options,'ReguType')
        options.ReguType = 'Ridge';
    end
    if ~isfield(options,'ReguAlpha')
        options.ReguAlpha = 0.1;
    end
end

% ====== Initialization
[nSmp,nFea] = size(data);
if length(gnd) ~= nSmp
    error('gnd and data mismatch!');
end

classLabel = unique(gnd);
nClass = length(classLabel);
Dim = nClass - 1;  % LDA yields at most (nClass-1) discriminative directions

if bPCA && isfield(options,'Fisherface') && options.Fisherface
    % Fisherface: keep exactly nSmp-nClass principal components so the
    % within-class scatter becomes nonsingular.
    options.PCARatio = nSmp - nClass;
end

if issparse(data)
    data = full(data);
end
% Center the data (LDA is defined on mean-removed data).
sampleMean = mean(data,1);
data = (data - repmat(sampleMean,nSmp,1));

% If the total scatter matrix is already positive definite (Cholesky
% succeeds), the SVD/PCA step is unnecessary; use the factor R directly.
bChol = 0;
if bPCA && (nSmp > nFea+1) && (options.PCARatio >= 1)
    DPrime = data'*data;
    DPrime = max(DPrime,DPrime');  % enforce exact symmetry
    [R,p] = chol(DPrime);
    if p == 0
        bPCA = 0;
        bChol = 1;
    end
end

%======================================
% SVD
%======================================
if bPCA
    [U, S, V] = mySVD(data);
    [U, S, V]=CutonRatio(U,S,V,options);
    eigvalue_PCA = full(diag(S));

    % Whiten: in the U basis the total scatter is the identity.
    data = U;
    eigvector_PCA = V*spdiags(eigvalue_PCA.^-1,0,length(eigvalue_PCA),length(eigvalue_PCA));
else
    if ~bChol
        DPrime = data'*data;

        % options.ReguAlpha = nSmp*options.ReguAlpha;

        switch lower(options.ReguType)
            case {lower('Ridge')}
                for i=1:size(DPrime,1)
                    DPrime(i,i) = DPrime(i,i) + options.ReguAlpha;
                end
            case {lower('Tensor')}
                DPrime = DPrime + options.ReguAlpha*options.regularizerR;
            case {lower('Custom')}
                DPrime = DPrime + options.ReguAlpha*options.regularizerR;
            otherwise
                error('ReguType does not exist!');
        end

        DPrime = max(DPrime,DPrime');
    end
end

% Hb'*Hb is the between-class scatter: each row of Hb is a class mean
% weighted by sqrt(class size).
[nSmp,nFea] = size(data);

Hb = zeros(nClass,nFea);
for i = 1:nClass,
    index = find(gnd==classLabel(i));
    classMean = mean(data(index,:),1);
    Hb (i,:) = sqrt(length(index))*classMean;
end

if bPCA
    % Data is whitened, so the LDA directions are the right singular
    % vectors of Hb; map them back through the PCA projection.
    [dumpVec,eigvalue,eigvector] = svd(Hb,'econ');
    eigvalue = diag(eigvalue);
    eigIdx = find(eigvalue < 1e-3);  % drop numerically-zero directions
    eigvalue(eigIdx) = [];
    eigvector(:,eigIdx) = [];

    eigvalue = eigvalue.^2;
    eigvector = eigvector_PCA*eigvector;
else
    WPrime = Hb'*Hb;
    WPrime = max(WPrime,WPrime');

    dimMatrix = size(WPrime,2);
    if Dim > dimMatrix
        Dim = dimMatrix;
    end

    if isfield(options,'bEigs')
        bEigs = options.bEigs;
    else
        % BUGFIX: original code tested the undefined variable 'ReducedDim'
        % here, which raised an error for large matrices; the requested
        % dimensionality is held in Dim.
        if (dimMatrix > MAX_MATRIX_SIZE) && (Dim < dimMatrix*EIGVECTOR_RATIO)
            bEigs = 1;
        else
            bEigs = 0;
        end
    end

    if bEigs
        %disp('use eigs to speed up!');
        option = struct('disp',0);
        if bChol
            option.cholB = 1;
            % R is the Cholesky factor of DPrime computed above.
            [eigvector, eigvalue] = eigs(WPrime,R,Dim,'la',option);
        else
            [eigvector, eigvalue] = eigs(WPrime,DPrime,Dim,'la',option);
        end
        eigvalue = diag(eigvalue);
    else
        % Dense generalized eigen-problem; sort eigenvalues descending.
        [eigvector, eigvalue] = eig(WPrime,DPrime);
        eigvalue = diag(eigvalue);

        [junk, index] = sort(-eigvalue);
        eigvalue = eigvalue(index);
        eigvector = eigvector(:,index);

        if Dim < size(eigvector,2)
            eigvector = eigvector(:, 1:Dim);
            eigvalue = eigvalue(1:Dim);
        end
    end
end

% Normalize each projection direction to unit length.
for i = 1:size(eigvector,2)
    eigvector(:,i) = eigvector(:,i)./norm(eigvector(:,i));
end
function [U, S, V]=CutonRatio(U,S,V,options)
if ~isfield(options, 'PCARatio')
LDA.rar_U7Y_URLM_将维_线性判别分析_维数约简
版权申诉
174 浏览量
2022-09-20
10:36:53
上传
评论
收藏 3KB RAR 举报
邓凌佳
- 粉丝: 65
- 资源: 1万+
最新资源
- 筷手引流工具.apk
- 论文(最终)_20240430235101.pdf
- 基于python编写的Keras深度学习框架开发,利用卷积神经网络CNN,快速识别图片并进行分类
- 最全空间计量实证方法(空间杜宾模型和检验以及结果解释文档).txt
- 5uonly.apk
- 蓝桥杯Python组的历年真题
- 2023-04-06-项目笔记 - 第一百十九阶段 - 4.4.2.117全局变量的作用域-117 -2024.04.30
- 2023-04-06-项目笔记 - 第一百十九阶段 - 4.4.2.117全局变量的作用域-117 -2024.04.30
- 前端开发技术实验报告:内含4四实验&实验报告
- Highlight Plus v20.0.1
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈