Hand-Written SVM (SMO) for Classifying the Horse Colic Dataset

   Date: 2020-08-09
Summary: This post is a hands-on exercise for my SVM series: a hand-written SVM, trained with SMO, applied to a simple classification of the horse colic dataset.

SVM is a large and intricate subject. In earlier posts I discussed its main principles from the following angles:

  1. SVM in Machine Learning (Hinge Loss + Kernel Trick): Derivation and Analysis
  2. Proofs of Strong Duality, Weak Duality, and the KKT Conditions (a Geometric View of the Dual Problem)
  3. An Overview of Kernel Methods: Positive-Definite Kernels and the Kernel Trick (Deriving Positive-Definite Kernels from the Gram Matrix)
  4. Deriving the Sequential Minimal Optimization (SMO) Algorithm by Hand
  5. SVM Revisited (Detailed Derivations of Hard-Margin and Soft-Margin SVM, KKT Conditions, the Kernel Trick)

This post puts that series into practice: a hand-written SVM, trained with SMO, to classify the horse colic dataset.
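
For quick reference, the two-variable update implemented by the update() function in the code below is the standard SMO subproblem. Here α denotes the Lagrange multipliers (stored as lambdas in the code), E_k = f(x_k) - y_k is the cached prediction error, K is the kernel (Gram) matrix, C the penalty parameter, and b the bias:

\[
\begin{aligned}
&\eta = K_{ii} + K_{jj} - 2K_{ij}, \qquad
\alpha_j^{\mathrm{new,unc}} = \alpha_j^{\mathrm{old}} + \frac{y_j\,(E_i - E_j)}{\eta},\\[4pt]
&(L, H) =
\begin{cases}
\bigl(\max(0,\ \alpha_j - \alpha_i),\ \min(C,\ C + \alpha_j - \alpha_i)\bigr), & y_i \ne y_j,\\
\bigl(\max(0,\ \alpha_i + \alpha_j - C),\ \min(C,\ \alpha_i + \alpha_j)\bigr), & y_i = y_j,
\end{cases}\\[4pt]
&\alpha_j^{\mathrm{new}} = \operatorname{clip}\bigl(\alpha_j^{\mathrm{new,unc}},\ L,\ H\bigr), \qquad
\alpha_i^{\mathrm{new}} = \alpha_i^{\mathrm{old}} + y_i y_j\,\bigl(\alpha_j^{\mathrm{old}} - \alpha_j^{\mathrm{new}}\bigr),\\[4pt]
&b_1 = b - E_i - y_i\bigl(\alpha_i^{\mathrm{new}} - \alpha_i^{\mathrm{old}}\bigr)K_{ii} - y_j\bigl(\alpha_j^{\mathrm{new}} - \alpha_j^{\mathrm{old}}\bigr)K_{ij},\\
&b_2 = b - E_j - y_i\bigl(\alpha_i^{\mathrm{new}} - \alpha_i^{\mathrm{old}}\bigr)K_{ij} - y_j\bigl(\alpha_j^{\mathrm{new}} - \alpha_j^{\mathrm{old}}\bigr)K_{jj}.
\end{aligned}
\]

A pair is skipped when L = H or η ≤ 0; b is then set to b_1 if 0 < α_i^new < C, to b_2 if 0 < α_j^new < C, and to (b_1 + b_2)/2 otherwise. The outer loop picks a KKT violator as the first variable and the inner loop picks the second variable to maximize |E_i - E_j|.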
Code:

import pandas as pd
import numpy as np
from sklearn import svm

class SMOStruct:
    def __init__(self, X, y, C, kernel, toler):
        self.N = X.shape[0]   # number of training samples
        self.X = X            # training samples
        self.y = y            # class labels (+1 / -1)
        self.C = C            # regularization (penalty) parameter
        self.kernel = kernel  # kernel spec: ['linear'], ['rbf', sigma], ['poly', degree], ['laplace', sigma] or ['sigmoid', beta, theta]
        self.b = 0            # scalar bias term
        self.E = -1.0 * self.y  # error cache; with all lambdas = 0 and b = 0, E_k = f(x_k) - y_k = -y_k
        self.toler = toler    # KKT tolerance
        self.lambdas = np.zeros(self.N)      # Lagrange multipliers, one per training sample
        self.K = np.zeros((self.N, self.N))  # precomputed kernel (Gram) matrix
        for i in range(self.N):
            for j in range(self.N):
                self.K[i, j] = Kernel(X[i], X[j], kernel)


def Kernel(xi, xj, kernel):
    if kernel[0] == 'linear':      # linear kernel
        res = np.dot(xi, xj)
    elif kernel[0] == 'rbf':       # Gaussian (RBF) kernel, kernel[1] = sigma
        sum_sq = 0.0
        for i in range(len(xi)):
            sum_sq += (xi[i] - xj[i]) ** 2
        res = np.exp(-sum_sq / (2 * kernel[1] ** 2))
    elif kernel[0] == 'poly':      # polynomial kernel, kernel[1] = degree
        res = np.dot(xi, xj) ** kernel[1]
    elif kernel[0] == 'laplace':   # Laplacian kernel, kernel[1] = sigma
        sum_sq = 0.0
        for i in range(len(xi)):
            sum_sq += (xi[i] - xj[i]) ** 2
        res = np.exp(-sum_sq ** 0.5 / kernel[1])  # take the square root once, after the loop
    elif kernel[0] == 'sigmoid':   # sigmoid kernel, kernel[1] = scale, kernel[2] = offset
        res = np.tanh(kernel[1] * np.dot(xi, xj) + kernel[2])
    else:
        raise ValueError('unknown kernel: %s' % kernel[0])
    return res

# Return True if sample i violates the KKT conditions (within tolerance toler)
def kkt(model, i):
    Ei = cacEi(model, i)
    if ((model.y[i] * Ei < -model.toler) and (model.lambdas[i] < model.C)) or \
            ((model.y[i] * Ei > model.toler) and (model.lambdas[i] > 0)):
        return True   # KKT conditions violated
    else:
        return False

# Choose the first variable to optimize: the first sample that violates the KKT conditions
def outerLoop(model):
    for i in range(model.N):
        if kkt(model, i):   # KKT conditions violated
            return i
    return None

# Choose the second variable: the index j whose cached error differs most from E_i
def inner_loop(model, i):
    E1 = model.E[i]
    max_diff = 0
    j = None
    nonzero_E = np.nonzero(model.E)[0]
    if len(nonzero_E) > 1:
        for k in nonzero_E:
            if k == i:       # j must differ from the first variable
                continue
            diff = abs(model.E[k] - E1)
            if diff > max_diff:
                max_diff = diff
                j = k
    if j is None:            # fall back to a random index different from i
        j = i
        while j == i:
            j = np.random.randint(0, model.N)
    return j

# Compute f(x_k) = sum_i lambda_i * y_i * K(x_i, x_k) + b
def calfx(model, k):
    fx = 0.0
    for i in range(model.N):
        fx += model.lambdas[i] * model.y[i] * model.K[i, k]
    fx += model.b
    return fx

# Compute the error E_k = f(x_k) - y_k
def cacEi(model, k):
    return calfx(model, k) - float(model.y[k])

# Core routine: optimize the pair (lambda_i, lambda_j), then update b and the error cache
def update(model, i, j):
    lambdai_old = model.lambdas[i]
    lambdaj_old = model.lambdas[j]
    # Box bounds L <= lambda_j <= H
    if model.y[i] != model.y[j]:
        L = max(0.0, model.lambdas[j] - model.lambdas[i])
        H = min(model.C, model.C + model.lambdas[j] - model.lambdas[i])
    else:
        L = max(0.0, model.lambdas[j] + model.lambdas[i] - model.C)
        H = min(model.C, model.lambdas[j] + model.lambdas[i])
    if L == H:
        return 0
    # eta = K_ii + K_jj - 2 * K_ij, the curvature along the constraint direction
    eta = model.K[i, i] + model.K[j, j] - 2.0 * model.K[i, j]
    if eta <= 0:
        return 0
    # Unconstrained optimum for lambda_j, then clip to [L, H]
    lambdaj_new_unc = lambdaj_old + model.y[j] * (model.E[i] - model.E[j]) / eta
    lambdaj_new = clipBonder(L, H, lambdaj_new_unc)
    lambdai_new = lambdai_old + model.y[i] * model.y[j] * (lambdaj_old - lambdaj_new)
    # Update the multipliers
    model.lambdas[i] = lambdai_new
    model.lambdas[j] = lambdaj_new

    # Update the bias b
    b1 = model.b - model.E[i] - model.y[i] * (lambdai_new - lambdai_old) * model.K[i, i] - model.y[j] * (
                lambdaj_new - lambdaj_old) * model.K[i, j]
    b2 = model.b - model.E[j] - model.y[i] * (lambdai_new - lambdai_old) * model.K[i, j] - model.y[j] * (
                lambdaj_new - lambdaj_old) * model.K[j, j]

    if 0 < model.lambdas[i] < model.C:
        model.b = b1
    elif 0 < model.lambdas[j] < model.C:
        model.b = b2
    else:
        model.b = (b1 + b2) / 2.0
    # Refresh the error cache for i and j (summing over support vectors only)
    sum1 = 0.0
    sum2 = 0.0
    for k in range(model.N):
        if 0 < model.lambdas[k] < model.C:
            sum1 += model.y[k] * model.lambdas[k] * model.K[i, k]
            sum2 += model.y[k] * model.lambdas[k] * model.K[j, k]

    model.E[i] = sum1 + model.b - model.y[i]
    model.E[j] = sum2 + model.b - model.y[j]
    return 1

# Compute w = sum_j lambda_j * y_j * x_j (only meaningful for the linear kernel)
def calW(lambdas, X, y):
    m, n = X.shape
    w = np.zeros((n, 1))
    for i in range(n):
        for j in range(m):
            w[i] += lambdas[j] * y[j] * X[j, i]
    return w

# Clip lambda to the box [L, H]
def clipBonder(L, H, lambda_):
    if lambda_ > H:
        return H
    elif L <= lambda_ <= H:
        return lambda_
    else:
        return L

# SMO main routine
def smo_main(C, kernel, toler):
    X, y = load_data('horse_colic/horseColicTraining.txt')
    model = SMOStruct(X, y, C, kernel, toler)
    max_step = 100   # number of full passes over the training set
    while max_step > 0:
        for i in range(model.N):
            if kkt(model, i):               # first variable: a KKT violator
                j = inner_loop(model, i)    # second variable: largest |E_i - E_j|
                update(model, i, j)
        max_step -= 1

    w = calW(model.lambdas, model.X, model.y)
    return model, w

# Load the data: each row has 22 tab-separated columns, the last one being the label
def load_data(path):
    data = pd.read_csv(path, sep='\t', names=[i for i in range(22)])
    data = np.array(data).tolist()
    x = []; y = []
    for i in range(len(data)):
        y.append(data[i][-1])
        del data[i][-1]
        x.append(data[i])

    # Map the 0/1 labels to -1/+1 for the SVM
    for i in range(len(y)):
        if y[i] == 0:
            y[i] = -1
    # (optional) feature scaling could be applied to x here
    x = np.array(x)
    y = np.array(y).reshape(len(y), )

    return x, y

def SVM():
    C = 0.75
    toler = 0.00001
    kernel = ['poly', 3]   # polynomial kernel of degree 3
    model, w = smo_main(C, kernel, toler)   # w is only meaningful for the linear kernel
    test_x, test_y = load_data('horse_colic/horseColicTest.txt')
    correct = 0
    for i in range(len(test_y)):
        # Predict with the kernel expansion f(x) = sum_k lambda_k * y_k * K(x_k, x) + b;
        # w.T x + b would only agree with the trained model under the linear kernel.
        fx = model.b
        for k in range(model.N):
            if model.lambdas[k] > 0:
                fx += model.lambdas[k] * model.y[k] * Kernel(model.X[k], test_x[i, :], kernel)
        if np.sign(fx) == test_y[i]:
            correct += 1

    print('Hand-written SVM accuracy:', correct / len(test_y))

def sklearn_svm():
    X, y = load_data('horse_colic/horseColicTraining.txt')
    clf = svm.SVC()
    clf.fit(X, y)
    test_x, test_y = load_data('horse_colic/horseColicTest.txt')
    print('sklearn SVC accuracy:', clf.score(test_x, test_y))
    
    
if __name__ == '__main__':
    SVM()
    sklearn_svm()
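
Note that sklearn_svm() above fits svm.SVC() with its defaults (RBF kernel, C = 1.0), so it is not a like-for-like baseline for the hand-written poly-kernel model. As a rough comparison one could mirror the hand-written hyperparameters instead; the helper below, sklearn_svm_matched, is only an illustrative sketch (the name is mine, it reuses load_data and the svm import from the listing, and scikit-learn's polynomial kernel (gamma * x.x' + coef0)**degree with its default gamma and coef0 = 0 only approximates the plain (x.x')**3 kernel used above):

def sklearn_svm_matched():
    # Sketch: mirror the hand-written hyperparameters (polynomial kernel, degree 3, C = 0.75).
    # gamma and coef0 are left at scikit-learn defaults, so the kernel is not an exact match.
    X, y = load_data('horse_colic/horseColicTraining.txt')
    clf = svm.SVC(kernel='poly', degree=3, C=0.75)
    clf.fit(X, y)
    test_x, test_y = load_data('horse_colic/horseColicTest.txt')
    print('sklearn SVC (poly, degree=3, C=0.75):', clf.score(test_x, test_y))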
 