【機器學習實戰】支援向量機----分類庫和簡單訓練mnist
阿新 • 發佈:2018-12-31
前面已經對mnist資料集進行了讀取,現在我們可以直接使用sklearn.svm模組中的演算法庫對mnist資料集進行訓練。
【svm模組】
演算法庫:
sklearn.svm模組中提供了這些庫:
大概分成這幾類(除了svm_l1_min_c返回的是懲罰引數C的最低界)
由於這次的任務是分類,因此只需要熟悉分類型別的庫。
分類庫主要引數:
屬性:
方法:
和一般模型都一樣,訓練用的fit(),評分的score(),預測新樣本的predict(),還有計算樣本點到分離超平面的距離的decision_function()等等。
【簡單比較】
隨機生成一組類別為兩類,特徵維數為2的樣本。用聚類(blobs)生成資料。簡單的來比較一下三種模型。
##用於視覺化圖表
import matplotlib.pyplot as plt
##用於做科學計算
import numpy as np
##用於做資料分析
import pandas as pd
##用於載入資料或生成資料等
from sklearn import datasets
##載入svm模型
from sklearn import svm
##隨機生成一組特徵數量為2,樣本數量為80的資料集
# Build a linearly separable toy set: 80 samples, 2 features, 2 clusters,
# with a fixed seed so the figure is reproducible.
X, y = datasets.make_blobs(n_samples=80, n_features=2, centers=2, random_state=3)
fig = plt.figure(figsize=(10, 8))
plt.axis([-8, 4, -5, 8])  # fix the viewing window: x limits then y limits
plt.scatter(X[:, 0], X[:, 1], c=y, s=30)
plt.show()
輸出為:
SVC:
## Fit an SVC with a linear kernel on the toy data and inspect its
## support vectors, coefficients and decision function.
model_svc = svm.SVC(kernel='linear')
model_svc.fit(X, y)
print("各類的支援向量在訓練樣本中的索引", model_svc.support_)
print("各類所有的支援向量:\n", model_svc.support_vectors_)
print("各類各有多少個支援向量", model_svc.n_support_)
print("各特徵係數", model_svc.coef_)
print("截距", model_svc.intercept_)
print("各樣本點到分離超平面的距離:\n", model_svc.decision_function(X))
fig = plt.figure(figsize=(10, 8))
plt.xlim(-8, 4)  # x-axis limits
plt.ylim(-5, 8)  # y-axis limits
## Draw the separating hyperplane w1*x + w2*y + b = 0 and the two margins.
w1 = model_svc.coef_[:, 0]
w2 = model_svc.coef_[:, 1]
b = model_svc.intercept_
x1 = np.linspace(-8, 6, 2)
x2 = (w1 * x1 + b) / (-1 * w2)           # decision boundary (w.x + b = 0)
x2_up = (w1 * x1 + b + 1) / (-1 * w2)    # upper margin (w.x + b = +1)
x2_down = (w1 * x1 + b - 1) / (-1 * w2)  # lower margin (w.x + b = -1)
plt.plot(x1, x2, 'k-', linewidth=0.8)
plt.plot(x1, x2_up, 'k--', linewidth=0.8)
plt.plot(x1, x2_down, 'k--', linewidth=0.8)
## Plot all samples and circle the support vectors.
plt.scatter(X[:, 0], X[:, 1], c=y, s=30)
# FIX: c='' is rejected by modern matplotlib; facecolors='none' is the
# documented way to draw hollow (unfilled) markers.
plt.scatter(model_svc.support_vectors_[:, 0], model_svc.support_vectors_[:, 1],
            s=80, facecolors='none', edgecolors='b')
plt.show()
輸出為:
各類的支援向量在訓練樣本中的索引 [ 3 77 63]
各類所有的支援向量:
[[ 0.21219196 1.74387328]
[-1.23229972 3.89519459]
[-2.94843418 0.3655385 ]]
各類各有多少個支援向量 [2 1]
各特徵係數 [[-0.48951758 -0.32852537]]
截距 [-0.32333516]
各樣本點到分離超平面的距離:
[-1.96224709 1.96992652 1.61830594 -1.00011347 -3.03968748 -1.91355576
-3.20222196 1.07605938 1.39390527 1.19794817 -3.09852679 -2.99356435
1.83058651 2.46025289 1.84454041 -1.98203511 1.18207352 -2.21362739
-1.93596757 1.5062249 -3.13955464 -1.41328098 2.11163776 -2.0100733
1.23402066 -1.3997197 1.42460256 1.9676612 1.10767531 1.64961948
1.95638419 1.51193805 -1.2642258 2.06733658 1.99862207 1.49307471
-1.44123444 -1.54063897 2.21232256 3.39921728 1.08180429 1.72267793
-3.1813601 1.61914905 1.59985133 -1.70286262 -1.94181226 1.59417872
2.15236394 -2.64727844 -2.54908967 -1.45290411 -2.30745878 -2.58497233
2.2307059 -2.6951711 -2.96443813 -1.73637146 2.20696118 -1.77028229
-2.67467925 -1.60612382 2.59439321 0.99988654 -1.59570877 1.53629311
-2.69403494 1.44783106 -2.07984685 -1.3734872 1.09058746 1.60125344
1.76284029 -1.83576229 -1.90749178 -2.44163699 2.01923035 -0.99977302
2.01835361 -1.9910022 ]
LinearSVC:
## Fit svm's LinearSVC (liblinear-based) with default parameters.
model_svc = svm.LinearSVC()
model_svc.fit(X, y)
print("各特徵係數", model_svc.coef_)
print("截距", model_svc.intercept_)
print("各樣本點到分離超平面的距離:\n", model_svc.decision_function(X))
fig = plt.figure(figsize=(10, 8))
plt.xlim(-8, 4)  # x-axis limits
plt.ylim(-5, 8)  # y-axis limits
## Separating line w1*x + w2*y + b = 0, plus the two margin lines at +/-1:
## solve each for y and draw boundary first, then upper and lower margins.
w1, w2 = model_svc.coef_[:, 0], model_svc.coef_[:, 1]
b = model_svc.intercept_
x1 = np.linspace(-8, 6, 2)
for offset, style in ((0, 'k-'), (1, 'k--'), (-1, 'k--')):
    boundary = (w1 * x1 + b + offset) / (-1 * w2)
    plt.plot(x1, boundary, style, linewidth=0.8)
輸出為:
各特徵係數 [[-0.43861416 -0.34667016]]
截距 [-0.17688348]
各樣本點到分離超平面的距離:
[-1.93591621 1.872307 1.51362035 -0.8745027 -2.92994539 -1.76382924
-3.0775207 1.0402308 1.29830435 1.11379048 -2.9397959 -2.79957801
1.71628833 2.39356823 1.73192856 -1.92789445 1.09570976 -2.04453356
-1.82101909 1.42157242 -2.98776927 -1.29034971 2.02056919 -1.88101332
1.19082519 -1.30553441 1.40516363 1.87444179 0.9941246 1.64724365
1.90093077 1.43400662 -1.13754557 1.94923769 1.87255811 1.44711078
-1.30510946 -1.47459897 2.12234644 3.19807915 1.04507129 1.58898707
-3.04488637 1.48192786 1.52054919 -1.59992275 -1.81201583 1.48496013
2.06825982 -2.5122903 -2.33044584 -1.36944972 -2.11484281 -2.43412128
2.09712517 -2.51293175 -2.805706 -1.64870254 2.146865 -1.69267358
-2.59429744 -1.57785694 2.51734148 0.9896202 -1.54717774 1.50306243
-2.61137792 1.38376341 -1.94732572 -1.30281968 0.96579586 1.58099132
1.6427689 -1.79606234 -1.82193917 -2.28426594 1.91258642 -0.98672711
1.90616794 -1.79154947]
NuSVC:
## Fit an NuSVC with a linear kernel; nu keeps its default of 0.5, which
## bounds the fraction of margin errors / support vectors.
model_svc = svm.NuSVC(kernel='linear')
# model_svc = svm.NuSVC(kernel='linear', nu=0.01)  # fewer support vectors
model_svc.fit(X, y)
print("各類的支援向量在訓練樣本中的索引", model_svc.support_)
print("各類各有多少個支援向量", model_svc.n_support_)
print("各特徵係數", model_svc.coef_)
print("截距", model_svc.intercept_)
print("各樣本點到分離超平面的距離:\n", model_svc.decision_function(X))
fig = plt.figure(figsize=(10, 8))
plt.xlim(-8, 4)  # x-axis limits
plt.ylim(-5, 8)  # y-axis limits
## Draw the separating hyperplane and its two margin lines.
w1 = model_svc.coef_[:, 0]
w2 = model_svc.coef_[:, 1]
b = model_svc.intercept_
x1 = np.linspace(-8, 6, 2)
x2 = (w1 * x1 + b) / (-1 * w2)           # decision boundary
x2_up = (w1 * x1 + b + 1) / (-1 * w2)    # margin at +1
x2_down = (w1 * x1 + b - 1) / (-1 * w2)  # margin at -1
plt.plot(x1, x2, 'k-', linewidth=0.8)
plt.plot(x1, x2_up, 'k--', linewidth=0.8)
plt.plot(x1, x2_down, 'k--', linewidth=0.8)
## Plot all samples and circle the support vectors.
plt.scatter(X[:, 0], X[:, 1], c=y, s=30)
# FIX: c='' is rejected by modern matplotlib; facecolors='none' is the
# documented way to draw hollow (unfilled) markers.
plt.scatter(model_svc.support_vectors_[:, 0], model_svc.support_vectors_[:, 1],
            s=80, facecolors='none', edgecolors='b')
plt.show()
輸出為:
各類的支援向量在訓練樣本中的索引 [ 0 3 5 18 21 25 32 36 37 45 46 51 57 59 61 64 69 73 74 77 79 2 7 8 9
16 19 24 26 28 31 35 40 43 44 47 63 65 67 70 71]
各類各有多少個支援向量 [21 20]
各特徵係數 [[-0.26852918 -0.18506518]]
截距 [-0.07402223]
各樣本點到分離超平面的距離:
[-1.00000001 1.18344728 0.98651753 -0.45373219 -1.59369374 -0.9613798
-1.68303355 0.69021961 0.86209936 0.75377706 -1.62199582 -1.56013705
1.10412131 1.46001562 1.11206673 -1.00846728 0.74471129 -1.12708414
-0.97711454 0.92581127 -1.64554153 -0.68461074 1.26315788 -1.017172
0.77771057 -0.67970603 0.886296 1.18259074 0.70066156 1.01348247
1.17979741 0.9296235 -0.60106054 1.23592282 1.1968279 0.92205787
-0.6989911 -0.76097669 1.31946142 1.97167963 0.69334256 1.04208873
-1.67029696 0.98397159 0.97856961 -0.84810873 -0.97900041 0.97262943
1.28653695 -1.3723103 -1.30974513 -0.7103885 -1.17727995 -1.33606029
1.32568017 -1.39466306 -1.54714741 -0.86822923 1.31923904 -0.88809099
-1.3926683 -0.80103249 1.53393157 0.65006995 -0.79334001 0.94736295
-1.4032617 0.89512436 -1.0557987 -0.66724352 0.69008091 0.9848262
1.0657701 -0.92815667 -0.96394481 -1.25544599 1.21013198 -0.46397868
1.20912877 -1. ]
【訓練mnist資料集】
讀取資料集:
import numpy as np
import struct
import matplotlib.pyplot as plt
import os
##載入svm模型
from sklearn import svm
###用於做資料預處理
from sklearn import preprocessing
import time
path='E:\\Desktop\\code_practice\\DATA'
def load_mnist_train(path, kind='train'):
    """Read an MNIST split in idx/ubyte format from directory `path`.

    `kind` selects the file-name prefix (default 'train').
    Returns (images, labels): images is an (n, 784) uint8 array of raw
    pixels, labels an (n,) uint8 array.
    """
    labels_file = os.path.join(path, '%s-labels.idx1-ubyte' % kind)
    images_file = os.path.join(path, '%s-images.idx3-ubyte' % kind)
    with open(labels_file, 'rb') as fh:
        struct.unpack('>II', fh.read(8))     # skip magic + item-count header
        labels = np.fromfile(fh, dtype=np.uint8)
    with open(images_file, 'rb') as fh:
        struct.unpack('>IIII', fh.read(16))  # skip magic/num/rows/cols header
        images = np.fromfile(fh, dtype=np.uint8).reshape(len(labels), 784)
    return images, labels
def load_mnist_test(path, kind='t10k'):
    """Read the MNIST test split in idx/ubyte format from `path`.

    The test files use the exact same layout as the training files, only
    the file-name prefix differs ('t10k' instead of 'train'), so delegate
    to load_mnist_train instead of duplicating the parsing logic.
    Returns (images, labels) with images shaped (n, 784), dtype uint8.
    """
    return load_mnist_train(path, kind)
# Load the training and test splits (images as (n, 784) uint8, labels uint8).
train_images,train_labels=load_mnist_train(path)
test_images,test_labels=load_mnist_test(path)
標準化:
# Standardize the raw pixel values (zero mean / unit variance per feature),
# then take the training portion.
# NOTE(review): the fitted scaler is discarded here, and the test set later
# gets a scaler fitted on the test data itself — the train-fitted scaler
# should be reused on the test set for a consistent transform.
X=preprocessing.StandardScaler().fit_transform(train_images)
X_train=X[0:60000]
y_train=train_labels[0:60000]
定義並訓練模型:
SVC
# Train an SVC with default parameters on the standardized training set,
# printing wall-clock timestamps before and after fit() to show how long
# training takes.
print(time.strftime('%Y-%m-%d %H:%M:%S'))
model_svc = svm.SVC()
model_svc.fit(X_train,y_train)
print(time.strftime('%Y-%m-%d %H:%M:%S'))
輸出為:
2018-04-27 12:39:20
2018-04-27 12:50:25
用了11分鐘,真的慢T T
評分並預測:
# Scale the test images with a scaler FITTED ON THE TRAINING DATA so the
# test features are transformed with the same per-feature statistics the
# model saw during training. (The original fit a fresh StandardScaler on
# the test set, which applies different statistics — inconsistent scaling.)
scaler = preprocessing.StandardScaler().fit(train_images)
x_test = scaler.transform(test_images)[0:10000]
y_test = test_labels[0:10000]  # ground-truth labels (was misnamed y_pred)
# Mean accuracy on the test set, then the predicted labels.
print(model_svc.score(x_test, y_test))
y = model_svc.predict(x_test)
輸出為:0.9657
評分還行,測試集顯示:
分別使用了SVC和LinearSVC,在預設引數的情況下它們的訓練情況:
類別 | LinearSVC | SVC |
---|---|---|
引數 | 預設 | 預設 |
訓練時間 | 約8分鐘 | 約11分鐘 |
準確率 | 0.9114 | 0.9657 |
下一步準備試試不同引數情況下SVC的分類情況。