% EM-GMM demo: clean the environment and define the ground-truth
% parameters of a 2-component bivariate Gaussian mixture.
clc;
clear;
close all;
warning off;
addpath(genpath(pwd));
numsamples = 1000; % Number of randomly generated data points
mu1 = [-2 3]; % Mean vector of component 1
mu2 = [4 -5]; % Mean vector of component 2
% Covariance matrices must be symmetric positive-definite.
% BUG FIX: the original sigma1 = [15 1; 0 5] was asymmetric and therefore
% not a valid covariance matrix (chol() only reads the upper triangle, so
% this correction does not change the generated samples).
sigma1 = [15 1 ; 1 5]; % Covariance matrix 1 (symmetric)
sigma2 = [12 1 ; 1 10]; % Covariance matrix 2 (symmetric)
prop = [7 3]; % Relative proportion of each component
prop = prop/sum(prop); % Normalize proportions so they sum to 1
% Stack the true parameters in a fixed row layout (mu1; mu2; sigma1 rows;
% sigma2 rows; proportions) for later comparison with the estimates.
x0 = [mu1(1),mu1(2);mu2(1),mu2(2);
sigma1(1,1),sigma1(1,2);sigma1(2,1),sigma1(2,2);
sigma2(1,1),sigma2(1,2);sigma2(2,1),sigma2(2,2);
prop(1),prop(2)];
% Define the dimensionality and the number of clusters to be estimated.
n = 2; % Dimensionality of each data point
k = 2; % Number of mixture components (clusters)
% Define stopping criteria for the EM iteration.
tol = 1e-6; % Stopping criterion 1: tolerance on the change in the means
maxiter = 5; % Stopping criterion 2: maximum number of EM iterations
% Number of data points drawn from each component.
% ROBUSTNESS: round() guards against non-integer counts when the
% proportions do not divide numsamples evenly; dim2 takes the remainder
% so the two counts always sum to numsamples.
dim1 = round(numsamples*prop(1));
dim2 = numsamples - dim1;
% Generate sample points with the specified means and covariance matrices.
% MATLAB's chol(S) returns an upper-triangular R with R'*R = S, so
% z*R has covariance S when z is a standard-normal row vector.
R1 = chol(sigma1);
X1 = randn(dim1, 2) * R1; % Random normal (randn) number generator
X1 = X1 + repmat(mu1, size(X1, 1), 1); % shift samples to mean mu1
R2 = chol(sigma2);
X2 = randn(dim2, 2) * R2; % Random normal (randn) number generator
X2 = X2 + repmat(mu2, size(X2, 1), 1); % shift samples to mean mu2
global X % shared with GMM_negloglik (see func/GMM_negloglik.m)
X = [X1; X2]; % Pool the two samples into one unlabeled data set
m = size(X, 1); % The number of data points; rows of X
% Re-symmetrize the covariance matrices as R'*R.
% BUG FIX: the original used chol(S)*chol(S)' = R*R', which does NOT
% reproduce S (MATLAB's convention is R'*R = S) and silently replaced the
% covariances with different matrices.
sigma1 = chol(sigma1)'*chol(sigma1);
sigma2 = chol(sigma2)'*chol(sigma2);
% Append a 1/2 class label to each sample so the data can also be used
% with the Classification Learner toolbox.
class1 = cat(2, X1, ones(size(X1,1), 1));
class2 = cat(2, X2, 2*ones(size(X2,1), 1));
Xclass = [class1;class2];
% Initial guesses for the EM algorithm (deliberately offset from the truth).
mu1g = [-4 , -2]; % guess for the component-1 mean
mu2g = [5 , -3]; % guess for the component-2 mean
mu = [mu1g;mu2g];
% BUG FIX: the initial covariance guesses must be symmetric; the original
% values ([50 5; 0.1 50] and [30 0.1; 1 3]) were asymmetric and therefore
% not valid covariance matrices.
sigma{1} = [50, 5; 5, 50];
sigma{2} = [30, 0.1; 0.1, 3];
phi = [99 11]; % unnormalized mixing-weight guess
phi = phi/sum(phi); % normalize so the weights sum to 1
% Stack the guesses in the same row layout as x0 for easy comparison.
x0g = [mu1g; mu2g;
sigma{1}(1,:);sigma{1}(2,:);
sigma{2}(1,:);sigma{2}(2,:);
phi(1),phi(2)];
% Square plotting grid, wide enough to contain both clusters.
gridSize = 50;
maxgrid = max(max(mu1,mu2)) + max(max(max(sigma1,sigma2)));
u = linspace(-maxgrid, maxgrid, gridSize);
% Figure 1, panel 1: scatter the generated samples and overlay the TRUE pdfs.
figure(1)
subplot(2,3,1);
hold off;
plot(X1(:, 1), X1(:, 2), 'g.');
hold on;
plot(X2(:, 1), X2(:, 2), 'm.');
% Build the 2-D evaluation grid from the 1-D axis u.
% NOTE: A, B, gridX, Z1, Z2 are reused by the later plotting sections.
[A , B] = meshgrid(u, u);
gridX = [A(:), B(:)];
% Calculate the Gaussian distribution for every value in the grid.
z1 = GaussianNormalDist(gridX, mu1, sigma1);
z2 = GaussianNormalDist(gridX, mu2, sigma2);
% Reshape the responses back into a 2D grid to be plotted with contour.
Z1 = reshape(z1, gridSize, gridSize);
Z2 = reshape(z2, gridSize, gridSize);
% Plot the contour lines (5) to show the pdf over the data.
contour(u, u, Z1, 5);
contour(u, u, Z2, 5);
daspect([1 1 1])
plot(mu1(1),mu1(2),'k.','MarkerSize',20) %plot original center X1
plot(mu2(1),mu2(2),'k.','MarkerSize',20) %plot original center X2
title('True PDFs');
% Figure 2: histogram of all samples (top) and a surface view of the two
% true pdfs stacked vertically (bottom).
figure(2)
subplot(2,1,1);
hold off
histogram(X);
hold on
title('Histogram of MV Normal Data')
xlabel('X'); ylabel('Frequency');
subplot(2,1,2);
hold off
surf([Z1;Z2])
hold on
title('MV PDF for Generated Data')
subplot(2,3,2);
hold off;
plot(X1(:, 1), X1(:, 2), 'g.');
hold on;
plot(X2(:, 1), X2(:, 2), 'm.');
% Calculate the Gaussian distribution for every value in the grid.
z1 = GaussianNormalDist(gridX, mu1g, sigma{1});
z2 = GaussianNormalDist(gridX, mu2g, sigma{2});
% Reshape the responses back into a 2D grid to be plotted with contour.
Z1 = reshape(z1, gridSize, gridSize);
Z2 = reshape(z2, gridSize, gridSize);
% Plot the contour lines (5) to show the pdf over the data.
contour(u, u, Z1, 5);
contour(u, u, Z2, 5);
daspect([1 1 1])
plot(mu1(1),mu1(2),'k.','MarkerSize',20) %plot original center X1
plot(mu2(1),mu2(2),'k.','MarkerSize',20) %plot original center X2
plot(mu1g(1),mu1g(2),'k*') %plot initial guess center X1
plot(mu2g(1),mu2g(2),'k*') %plot initial guess center X2
title('Initial Guess');
% Run the EM algorithm for at most maxiter iterations.
for iter = 1:maxiter
    % ===== E-step: compute posterior membership weights =====
    pdf = zeros(m, k);
    for j = 1:k
        % Likelihood of every data point under component j.
        pdf(:, j) = GaussianNormalDist(X, mu(j, :), sigma{j});
    end
    % Weight by the current mixing proportions, then normalize each row
    % so W(i,:) sums to 1 (responsibility of each component for point i).
    pdf_w = bsxfun(@times, pdf, phi);
    W = bsxfun(@rdivide, pdf_w, sum(pdf_w, 2));
    % ===== M-step: re-estimate proportions, means, covariances =====
    prevMu = mu;
    for j = 1:k
        prop_(j) = mean(W(:, j), 1); % new mixing proportion
        weights = W(:,j);
        mu(j, :) = (weights' * X) ./ sum(weights, 1); % weighted mean
        % Weighted covariance: sum_i w_i*(x_i-mu)'*(x_i-mu) / sum_i w_i.
        % PERFORMANCE: vectorized; replaces the original O(m) per-sample
        % accumulation loop with two matrix products.
        Xm = bsxfun(@minus, X, mu(j, :));
        sigma{j} = (Xm' * bsxfun(@times, Xm, weights)) ./ sum(weights);
    end
    % BUG FIX: feed the updated mixing proportions back into the next
    % E-step. The original computed prop_ but never updated phi, so every
    % E-step kept using the initial guess for the mixing weights.
    phi = prop_;
    % Converged when no mean component moved by more than tol.
    % (Explicit max over all entries, instead of relying on "if" being
    % true only when every element of a logical matrix is true.)
    if max(abs(mu(:) - prevMu(:))) < tol
        break
    end
end
% Unpack the EM estimates into individual variables.
mu1_ = mu(1,:);
mu2_ = mu(2,:);
sigma1_ = sigma{1};
sigma2_ = sigma{2};
% Figure 1, panel 4: data with the EM-estimated pdfs overlaid.
% BUG FIX: target figure(1) explicitly; the original bare "figure" call
% opened a brand-new window even though subplot(2,3,4) belongs to the
% 2x3 summary layout used by every other panel.
figure(1)
subplot(2,3,4);
hold off;
plot(X1(:, 1), X1(:, 2), 'g.');
hold on;
plot(X2(:, 1), X2(:, 2), 'm.');
[A , B] = meshgrid(u, u);
gridX = [A(:), B(:)];
% Calculate the Gaussian response for every value in the grid.
z1_ = GaussianNormalDist(gridX, mu1_, sigma1_);
z2_ = GaussianNormalDist(gridX, mu2_, sigma2_);
% Reshape the responses back into a 2D grid to be plotted with contour.
Z1_ = reshape(z1_, gridSize, gridSize);
Z2_ = reshape(z2_, gridSize, gridSize);
% Plot the contour lines (5) to show the pdf over the data.
contour(u, u, Z1_, 5);
contour(u, u, Z2_, 5);
daspect([1 1 1])
% Dots mark the true centers; stars mark the EM-estimated centers.
plot(mu1(1),mu1(2),'k.','MarkerSize',20) %plot original center X1
plot(mu2(1),mu2(2),'k.','MarkerSize',20) %plot original center X2
plot(mu1_(1),mu1_(2),'k*') %plot new center X1
plot(mu2_(1),mu2_(2),'k*') %plot new center X2
title('EM Contours');
% Stack the EM estimates in the same row layout as x0/x0g; this is the
% starting point for the derivative-free refinement below.
x0_ = [mu1_(1),mu1_(2); mu2_(1),mu2_(2);
sigma1_(1,1),sigma1_(1,2); sigma1_(2,1),sigma1_(2,2);
sigma2_(1,1),sigma2_(1,2); sigma2_(2,1),sigma2_(2,2);
prop_(1),prop_(2)];
% Refine the EM estimate x0_ by direct minimization of the negative
% log-likelihood with derivative-free Nelder-Mead (fminsearch).
options = optimset(...
'PlotFcns', {@optimplotfval,@optimplotfunccount},...
'MaxIter', 500,...
'MaxFunEvals', 5000,...
'TolFun', 1e-6,...
'TolX', 1e-6);
figure(3)
% NOTE(review): GMM_negloglik takes only the parameter matrix, so it
% presumably reads the data through the global X declared above —
% confirm against func/GMM_negloglik.m.
[optXval,optFval,optFlag,optOut] = fminsearch(@GMM_negloglik, x0_, options);
% Copy the optimizer's plot axes into figure 3 and label them.
copyobj(get(gcf,'children'),figure(3));
hold on
xlabel('fminsearch iteration')
hold off
% Re-normalize the proportion row (the optimizer does not enforce the
% sum-to-one constraint), then unpack the optimized parameters.
optXval(7,:) = optXval(7,:)/sum(optXval(7,:));
mu1_opt1 = optXval(1,:);
mu2_opt1 = optXval(2,:);
sigma1_opt1 = optXval(3:4,:);
sigma2_opt1 = optXval(5:6,:);
prop_opt1 = optXval(7,:);
% Refine the same starting point x0_ with quasi-Newton fminunc
% (Optimization Toolbox) using much tighter tolerances than fminsearch.
options2 = optimset(...
'PlotFcns', {@optimplotfval,@optimplotfirstorderopt}, ...
'MaxIter', 5000,...
'MaxFunEvals', 1E6,...
'TolFun', 1e-15,...
'TolX', 1e-15);
figure(4)
[optXval2,optFval2,optFlag2,optOut2] = fminunc(@GMM_negloglik, x0_, options2);
% Copy the optimizer's plot axes into figure 4 and label them.
copyobj(get(gcf,'children'),figure(4));
hold on
xlabel('fminunc iteration')
hold off
% Re-normalize the proportion row, then unpack the optimized parameters.
optXval2(7,:) = optXval2(7,:)/sum(optXval2(7,:));
mu1_opt2 = optXval2(1,:);
mu2_opt2 = optXval2(2,:);
sigma1_opt2 = optXval2(3:4,:);
sigma2_opt2 = optXval2(5:6,:);
prop_opt2 = optXval2(7,:);
% Figure 1, panel 5: data with the fminsearch-refined pdfs overlaid.
% NOTE(review): this section is cut off in this copy of the file — the
% next statement is truncated mid-identifier; recover the rest from the
% original Runme.m.
figure(1)
subplot(2,3,5);
hold off;
plot(X1(:, 1), X1(:, 2), 'g.');
hold on;
plot(X2(:, 1), X2(:, 2), 'm.');
% Calculate the Gaussian response for every value in the grid.
z1_ = GaussianNormalDist(gridX, mu1_opt1, sigma1_opt1);
z2_ = Gau
没有合适的资源?快使用搜索试试~ 我知道了~
温馨提示
1.版本:matlab2021a,包含仿真操作录像,操作录像使用windows media player播放。 2.领域:EM-GMM局部最大似然估计 3.内容:基于期望最大化(EM)算法的GMM局部最大似然估计matlab仿真 4.注意事项:注意MATLAB左侧当前文件夹路径,必须是程序所在文件夹位置,具体可以参考视频录。
资源推荐
资源详情
资源评论
收起资源包目录
基于期望最大化(EM)算法的GMM局部最大似然估计matlab仿真.rar (13个子文件)
34.jpg 30KB
matlab
func
GMM_CleanVars.m 237B
GMM_Initialization.m 936B
GMM_InitialGuess.m 161B
GMM_CreatePlot.m 2KB
GaussianNormalDist.m 229B
GMM_Optim.m 526B
GMM_negloglik.m 449B
Runme.m 9KB
11.jpg 30KB
22.jpg 32KB
44.jpg 33KB
操作录像0039.avi 14.11MB
共 13 条
- 1
资源评论
fpga和matlab
- 粉丝: 15w+
- 资源: 2548
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功