# coding=utf-8
from scipy.optimize import minimize
import numpy as np
class SVM():
def __init__(self,data,label,eps=0.00001):
self.data=data
self.label=label
self.eps=eps
def fun(self):
n=self.data.shape[0]
m=self.data.shape[1]
if(n!=len(self.label)):
print("error :data and label should be the same dimension!")
y=np.repeat(self.label.reshape(n,1),m,axis=1)
A=np.multiply(self.data,y)
A=np.dot(A,A.T)
v=lambda alpha: 1/2*np.dot(alpha,np.dot(A,alpha))-np.sum(alpha)
return v
def createfun(self,i):
return (lambda alpha:alpha[i])
def con(self):
C=10.0
cons=list()
#cons1=tuple()
y=self.label
for i in range(0,len(y)):
cons.append({'type': 'ineq', 'fun': self.createfun(i)})
#cons.append({'type': 'ineq', 'fun': lambda alpha:alpha[i]})
#cons.append({'type': 'ineq', 'fun': lambda alpha,i=i:alpha[i]})
#cons.append({'type': 'ineq', 'fun': lambda alpha: C-alpha[i]})
cons.append({'type': 'eq', 'fun': lambda alpha: np.dot(alpha,y)})
cons=tuple(cons)
#cons=({'type': 'ineq', 'fun': lambda x: x[0]},)+({'type': 'ineq', 'fun': lambda x: x[1]},)+({'type': 'ineq', 'fun': lambda x: x[2]},)+({'type': 'eq', 'fun': lambda x: np.dot(x,y)},)
return cons
def fit(self,x0):
cons=self.con()
res=minimize(self.fun(),x0,method='SLSQP',constraints=cons)
print(res.fun)
print(res.success)
print(res.x)
eps=self.eps
self.alpha=res.x
ay=np.multiply(self.alpha,self.label)
ay=ay.reshape(len(ay),1)
ay=np.repeat(ay,self.data.shape[1],axis=1)
ay_data=np.multiply(self.data,ay)
self.w=np.sum(ay_data,axis=0)
b=[]
sv=self.alpha>eps
self.sv=np.argwhere(sv)
y=self.label[sv]
x=self.data[sv,:]
for i in range(0,x.shape[0]):
bi=1/y[i]-np.dot(self.w,x[i,:])
b.append(bi)
self.b=np.mean(b)
def predict(self,x):
print('not implemented yet')
if __name__ == "__main__":
    # Classic 2-D toy problem: two positive and two negative points,
    # last column of each row is the class label.
    samples = np.array([[3, 3, 1], [4, 3, 1], [1, 1, -1], [1, 2, -1]])
    features, targets = samples[:, :-1], samples[:, -1]
    model = SVM(features, targets)
    initial_alpha = np.asarray((0, 0, 0, 0))
    model.fit(initial_alpha)
    query = [1, 2]
    model.predict(query)