function basis = kpca_calc(xs,kernel,d,kmataxis)
%KPCA_CALC calculates a kernel PCA basis.
%
% usage
%   basis = kpca_calc(xs,kernel,d,kmataxis);
%
% input
%   xs        matrix of column vectors (one sample per column)
%   kernel    a chosen kernel, default = {'gaussian',1}
%   d         number of eigenvectors (give for efficiency),
%             default = size(xs,2)
%   kmataxis  figure handle where the kernel matrix will be
%             plotted (default = 0, no plot)
%
% output
%   basis     struct containing the following entries
%   basis.V       eigenvectors (columns, normalized in feature space)
%   basis.Lambda  diagonal matrix of eigenvalues (sorted descending)
%   basis.xs      used vectors
%   basis.kernel  used kernel
%
% see also
%   kpca_plot, kpca_map
%
% STH * 12MAR2002

% Default arguments. exist(...,'var') restricts the check to workspace
% variables (a function or file named 'kernel' would otherwise satisfy
% exist), and the short-circuit || guarantees isempty is never evaluated
% on an undefined variable (element-wise | evaluates both operands).
if ~exist('kernel','var') || isempty(kernel), kernel = {'gaussian',1}; end
if ~exist('d','var') || isempty(d), d = size(xs,2); end
if ~exist('kmataxis','var') || isempty(kmataxis), kmataxis = 0; end

% d can't be larger than the number of samples (the kernel matrix is
% n-by-n, so at most n eigenpairs exist)
n = size(xs,2);
if d > n
  warning('d is larger than the number of samples, resetting d')
  d = n;
end

% calculate the kernel matrix K(i,j) = k(xs(:,i),xs(:,j))
K = kpca_matrix(xs,xs,kernel);

% optionally show the kernel matrix, restoring the current figure
% afterwards so the caller's focus is not stolen
if kmataxis > 0
  cf = gcf;
  figure(kmataxis)
  imagesc(K)
  figure(cf)
end

% center the kernel matrix in feature space:
% Kc = K - 1n*K - K*1n + 1n*K*1n  with 1n = ones(n)/n
sk = size(K,1);            % note, K is a square matrix
rowK = sum(K)/sk;          % the column means (row vector)
allK = sum(K(:))/(sk*sk);  % the grand mean of all entries
K = K - repmat(rowK,[sk 1]) - repmat(rowK',[1 sk]) + repmat(allK,[sk sk]);

% find the d largest-magnitude eigenpairs of the centered matrix.
% (jdqr(K/sk,d) is an equivalent alternative if the JDQR toolbox is
% available; the dead switch selecting it has been removed.)
opts.disp = 0;
[V,Lambda,flag] = eigs(K/sk,d,'LM',opts);
if flag
  warning([mfilename ': not all eigenvalues converged'])
end

% eigs does not guarantee sorted eigenvalues; sort them descending
[dummy, ind] = sort(-diag(Lambda));
Lambda = Lambda(ind,ind);
V = V(:,ind);

% due to numerical instabilities some eigenvalues might be negative
% or smaller than eps; truncate at the first such eigenvalue
invalid = find(diag(Lambda) < 2*eps);
if ~isempty(invalid)
  d = invalid(1) - 1;
  warning([mfilename ': some eigenvalues of kernel matrix are less than eps'])
end
clear invalid

% cut off the discarded eigenvalues and eigenvectors
V = V(:,1:d);
Lambda = Lambda(1:d,1:d);

% normalize the eigenvectors in feature space: V <- V*(sk*Lambda)^(-1/2).
% mrdivide (/) is preferred over an explicit inv() for accuracy.
V = V / sqrtm(sk*Lambda);

% assign output struct
basis.V = V;
basis.Lambda = Lambda;
basis.xs = xs;
basis.kernel = kernel;
没有合适的资源?快使用搜索试试~ 我知道了~
KFDA核Fisher鉴别
共9个文件
m:9个
4星 · 超过85%的资源 需积分: 26 101 下载量 107 浏览量
2010-06-21
22:14:51
上传
评论 1
收藏 6KB RAR 举报
温馨提示
采用了,核函数Fisher鉴别准则,分类。
资源推荐
资源详情
资源评论
收起资源包目录
KFDA.rar (9个子文件)
KFDA
GetFeature_KFDA.m 2KB
GetProjection.m 704B
Recognition.m 758B
kpca_matrix.m 1024B
GetScatter.m 800B
main.m 567B
ReadImage.m 1KB
kpca_map.m 738B
kpca_calc.m 3KB
共 9 条
- 1
资源评论
- zqwerfdsa2012-07-19麻烦标明是matlab,有没有C的?
- qqqq73459088qq2013-01-11对于初学者是不错的资料,只是,用于训练的数据需要自己来进行下载
- idea_chuxiao2015-05-20看不懂,是用于数据降维还是用于分类啊
wdmjx
- 粉丝: 1
- 资源: 3
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功