機器學習之線性迴歸 SVR
# -*- coding: utf-8 -*-
"""
Created on Sun Dec  2 09:53:01 2018

@author: muli
"""

import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets,cross_validation,svm

def load_data_regression():
    """Load the scikit-learn diabetes dataset and split it for regression.

    :return: a 4-tuple ``(X_train, X_test, y_train, y_test)`` — training
        samples, test samples, training targets and test targets.  The test
        set holds 1/4 of the data; ``random_state=0`` makes the split
        reproducible.
    """
    # ``sklearn.cross_validation`` was deprecated in 0.18 and removed in
    # scikit-learn 0.20; ``model_selection`` is its replacement.  Imported
    # locally so this function uses the supported API.
    # NOTE(review): the module-level ``from sklearn import ... cross_validation``
    # at the top of the file also needs updating on modern scikit-learn.
    from sklearn.model_selection import train_test_split

    # Built-in diabetes-patients dataset (442 samples, 10 features).
    diabetes = datasets.load_diabetes()
    return train_test_split(diabetes.data, diabetes.target,
                            test_size=0.25, random_state=0)
    
    
def test_LinearSVR(*data):
    """Fit a default LinearSVR and report its parameters and test score.

    :param data: variadic 4-tuple — training samples, test samples,
        training targets, test targets.
    :return: None (results are printed).
    """
    X_train, X_test, y_train, y_test = data
    model = svm.LinearSVR()
    model.fit(X_train, y_train)
    # Learned linear model followed by R^2 on the held-out set.
    print('Coefficients:%s, intercept %s' % (model.coef_, model.intercept_))
    print('Score: %.2f' % model.score(X_test, y_test))


def test_LinearSVR_loss(*data):
    """Compare LinearSVR performance across its two loss functions.

    :param data: variadic 4-tuple — training samples, test samples,
        training targets, test targets.
    :return: None (results are printed per loss).
    """
    X_train, X_test, y_train, y_test = data
    # The two loss functions LinearSVR supports.
    for loss_name in ('epsilon_insensitive', 'squared_epsilon_insensitive'):
        model = svm.LinearSVR(loss=loss_name)
        model.fit(X_train, y_train)
        print("loss:%s" % loss_name)
        print('Coefficients:%s, intercept %s' % (model.coef_, model.intercept_))
        print('Score: %.2f' % model.score(X_test, y_test))


def test_LinearSVR_epsilon(*data):
    """Plot how the epsilon parameter affects LinearSVR train/test scores.

    :param data: variadic 4-tuple — training samples, test samples,
        training targets, test targets.
    :return: None (shows a matplotlib figure).
    """
    X_train, X_test, y_train, y_test = data
    # Geometric grid of epsilon values spanning 10^-2 .. 10^2.
    epsilons = np.logspace(-2, 2)
    train_scores, test_scores = [], []
    for eps in epsilons:
        model = svm.LinearSVR(epsilon=eps, loss='squared_epsilon_insensitive')
        model.fit(X_train, y_train)
        train_scores.append(model.score(X_train, y_train))
        test_scores.append(model.score(X_test, y_test))

    fig, ax = plt.subplots()
    ax.plot(epsilons, train_scores, label="Training score ", marker='+')
    ax.plot(epsilons, test_scores, label=" Testing  score ", marker='o')
    ax.set_title("LinearSVR_epsilon ")
    ax.set_xscale("log")  # epsilon grid is geometric, so use a log axis
    ax.set_xlabel(r"$\epsilon$")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()
    

def test_LinearSVR_C(*data):
    """Plot how the regularization parameter C affects LinearSVR scores.

    :param data: variadic 4-tuple — training samples, test samples,
        training targets, test targets.
    :return: None (shows a matplotlib figure).
    """
    X_train, X_test, y_train, y_test = data
    # Geometric grid of C values spanning 10^-1 .. 10^2.
    Cs = np.logspace(-1, 2)
    train_scores, test_scores = [], []
    for c in Cs:
        model = svm.LinearSVR(epsilon=0.1, loss='squared_epsilon_insensitive', C=c)
        model.fit(X_train, y_train)
        train_scores.append(model.score(X_train, y_train))
        test_scores.append(model.score(X_test, y_test))

    fig, ax = plt.subplots()
    ax.plot(Cs, train_scores, label="Training score ", marker='+')
    ax.plot(Cs, test_scores, label=" Testing  score ", marker='o')
    ax.set_title("LinearSVR_C ")
    ax.set_xscale("log")  # C grid is geometric, so use a log axis
    ax.set_xlabel(r"C")
    ax.set_ylabel("score")
    ax.set_ylim(-1, 1.05)
    ax.legend(loc="best", framealpha=0.5)
    plt.show()


if __name__ == "__main__":
    # Build the regression train/test split once, then run the C-sweep demo.
    X_train, X_test, y_train, y_test = load_data_regression()
    # Other demos — uncomment to run:
    # test_LinearSVR(X_train, X_test, y_train, y_test)
    # test_LinearSVR_loss(X_train, X_test, y_train, y_test)
    # test_LinearSVR_epsilon(X_train, X_test, y_train, y_test)
    test_LinearSVR_C(X_train, X_test, y_train, y_test)

  • 執行後如圖所示:程式會繪製 LinearSVR 得分隨參數變化的曲線。

作者:muli