# -*- coding: utf-8 -*-
import numpy as np
from sklearn.mixture import GMM
'''
Image feature representation
'''
class LEVEL_GMM:
    """Second-level Gaussian mixture fitted by EM on top of a set of
    first-level GMMs (e.g. one sklearn GMM per image).

    Each element of the training set X must expose ``weights_`` with shape
    (K,), ``means_`` with shape (K, D) and ``covars_`` with shape (K, D, D)
    — i.e. full covariance matrices are assumed (TODO confirm against the
    callers; sklearn's default 'diag' covariances would not fit this shape).
    """

    def __init__(self, M, D):
        """Initialize an M-component mixture over D-dimensional means.

        Weights start uniform, means uniform-random in [0, 1), and every
        covariance starts as 0.1 * I.
        """
        self.n_component = M
        self.weights = np.array([1.0 / M] * M)
        self.means = np.random.random([M, D])
        self.covars = np.tile(np.eye(D) * 0.1, (M, 1, 1))
        self.converged = False

    def fit(self, X, n_iter=100, threshold=0.0001):
        """Run EM until the largest absolute parameter change falls below
        ``threshold`` (sets ``self.converged``) or ``n_iter`` iterations pass.
        """
        for i in range(n_iter):
            print("Iteration:", i + 1)
            responsibilities = self.step_E(X, self.weights, self.means, self.covars)
            weights_new, means_new, covars_new = self.step_M(X, responsibilities)
            # Convergence metric: max absolute change over all parameters.
            convergence = max(np.max(np.abs(weights_new - self.weights)),
                              np.max(np.abs(means_new - self.means)),
                              np.max(np.abs(covars_new - self.covars)))
            if convergence > threshold:
                self.weights = weights_new
                self.means = means_new
                self.covars = covars_new
            else:
                self.converged = True
                break

    def step_E(self, X, weights, means, covars):
        """E-step.

        Returns responsibilities with shape (J, K, M): the posterior of
        upper component m for lower component k of sample j, normalized
        over m.
        """
        J = len(X)
        K = len(X[0].weights_)
        M = self.n_component
        responsibilities = []
        for j in range(J):
            k_temp = []
            for k in range(K):
                m_temp = []
                for m in range(M):
                    # Regularize a singular covariance back to 0.1 * I.
                    # (The original referenced an undefined name D here,
                    # which raised NameError instead of regularizing.)
                    if np.linalg.det(covars[m]) == 0:
                        covars[m] = np.eye(covars[m].shape[0]) * 0.1
                    Gauss = self.N_Dimension_Gaussian(X[j].means_[k], means[m], covars[m])
                    # Extra factor from matching the lower-level Gaussian
                    # against the upper one (trace term).
                    Gauss *= np.exp(-0.5 * np.trace(np.dot(np.linalg.inv(covars[m]), X[j].covars_[k])))
                    # Weight the likelihood by the lower mixture weight
                    # (as an exponent) and the upper prior weight.
                    m_temp.append((Gauss ** (X[j].weights_[k])) * weights[m])
                k_temp.append(m_temp)
            responsibilities.append(k_temp)
        responsibilities = np.array(responsibilities)
        # Normalize over the upper components (axis 2).
        responsibilities /= np.sum(responsibilities, axis=2, keepdims=True)
        return responsibilities

    def step_M(self, X, responsibilities):
        """M-step: compute new weights, means and covariances.

        The input array is left untouched (the original mutated it in
        place, a side effect on the caller's array).
        """
        responsibilities = responsibilities.copy()
        J, K, M = responsibilities.shape
        weights_new = self.get_weights_new(responsibilities)
        # Divide out the lower-level mixture weights pi_jk ...
        Pi_jk = np.array([X[j].weights_ for j in range(J)]).reshape(J, K, 1)
        responsibilities /= Pi_jk
        # ... then renormalize each upper component's column to sum to 1
        # over all (j, k), so means/covars become convex combinations.
        responsibilities /= np.sum(responsibilities, axis=(0, 1), keepdims=True)
        means_new = self.get_means_new(X, responsibilities)
        covars_new = self.get_covars_new(X, means_new, responsibilities)
        return weights_new, means_new, covars_new

    def get_weights_new(self, responsibilities):
        """New mixture weights: mean responsibility per upper component.

        Sums to 1 because the E-step normalized over components.
        """
        J, K, M = responsibilities.shape
        return np.sum(responsibilities, axis=(0, 1)) / J / K

    def get_means_new(self, X, responsibilities):
        """New component means: responsibility-weighted combination of the
        lower-level means. Returns shape (M, D)."""
        J = len(X)
        means_jk = np.array([X[j].means_ for j in range(J)])  # (J, K, D)
        # means_new[m] = sum_{j,k} r[j,k,m] * mu_jk
        return np.einsum('jkm,jkd->md', responsibilities, means_jk)

    def get_covars_new(self, X, means, responsibilities):
        """New component covariances: responsibility-weighted sum of the
        lower covariances plus the between-means outer products.
        Returns shape (M, D, D)."""
        J, K, M = responsibilities.shape
        covars_jk = np.array([X[j].covars_ for j in range(J)])    # (J, K, D, D)
        means_jk = np.array([X[j].means_ for j in range(J)])      # (J, K, D)
        # diff[j,k,m] = mu_jk - mu_m
        diff = means_jk[:, :, None, :] - means[None, None, :, :]  # (J, K, M, D)
        outer = diff[..., :, None] * diff[..., None, :]           # (J, K, M, D, D)
        total = covars_jk[:, :, None, :, :] + outer
        # covars_new[m] = sum_{j,k} r[j,k,m] * (Sigma_jk + diff diff^T)
        return np.sum(responsibilities[..., None, None] * total, axis=(0, 1))

    def N_Dimension_Gaussian(self, X, M, Cov):
        """Density of the multivariate normal N(M, Cov) evaluated at X."""
        Dimension = np.shape(X)[0]
        Cov = np.float32(Cov)  # preserved from original: evaluate in float32
        Y = X - M
        # Mahalanobis distance Y^T Cov^-1 Y (Y is 1-D, so no transpose needed).
        temp = np.dot(np.dot(Y, np.linalg.inv(Cov)), Y)
        norm = ((2 * np.pi) ** (Dimension / 2.0)) * (np.linalg.det(Cov) ** 0.5)
        return np.exp(-0.5 * temp) / norm