Support Vector Machines: Nonlinear Regression with SVR

Model prototype
sklearn.svm.SVR(kernel='rbf', degree=3, gamma='auto', coef0=0.0, tol=0.001, C=1.0, epsilon=0.1, shrinking=True, cache_size=200, verbose=False, max_iter=-1)
Parameters

  • kernel: kernel type ('linear', 'poly', 'rbf', 'sigmoid', or 'precomputed'); default is 'rbf'
  • degree: degree of the polynomial kernel (ignored by other kernels)
  • gamma: kernel coefficient for the 'rbf', 'poly', and 'sigmoid' kernels
  • coef0: independent term (r) in the 'poly' and 'sigmoid' kernels
  • tol: tolerance for the stopping criterion
  • C: regularization (penalty) parameter
  • epsilon: width of the epsilon-insensitive tube; training errors inside it are not penalized
  • shrinking: whether to use the shrinking heuristic
  • cache_size: size of the kernel cache in MB
  • verbose: whether to enable verbose output
  • max_iter: hard limit on the number of iterations (-1 means no limit)

Attributes

  • support_: indices of the support vectors
  • support_vectors_: the support vectors themselves
  • n_support_: number of support vectors
  • dual_coef_: coefficients of the support vectors in the decision function
  • coef_: weights assigned to the features (only available with a linear kernel)
  • intercept_: constant (intercept) term in the decision function

Methods
- fit(X,y): fit the model on the training data
- predict(X): predict target values for X
- score(X,y[,sample_weight]): return the R^2 (coefficient of determination) of the prediction
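
As a quick orientation, here is a minimal usage sketch of this estimator on a tiny synthetic 1-D dataset; the data and parameter values below are illustrative only and are not part of the experiments that follow.

import numpy as np
from sklearn.svm import SVR

X = np.linspace(0, 6, 50).reshape(-1, 1)    # 50 samples, 1 feature
y = np.sin(X).ravel()                        # continuous regression target

regr = SVR(kernel='rbf', C=1.0, epsilon=0.1, gamma='auto')
regr.fit(X, y)                               # fit(X, y)
print(regr.predict(X[:3]))                   # predict(X)
print('R^2: %.2f' % regr.score(X, y))        # score(X, y) returns R^2
print(regr.support_vectors_.shape)           # support vectors selected by the fit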

import matplotlib.pyplot as plt
import numpy as np
from sklearn import datasets,svm
from sklearn.model_selection import train_test_split

Load the data

def load_data_regression():
    # Use the diabetes dataset (a regression task) and hold out 25% as the test set
    diabetes=datasets.load_diabetes()
    return train_test_split(diabetes.data,diabetes.target,test_size=0.25,random_state=0)

Effect of different kernels
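
For reference, the kernel functions these experiments sweep over are the standard scikit-learn ones (with r denoting coef0):

  • linear: K(x, x') = x · x'
  • poly: K(x, x') = (gamma * (x · x') + r)^degree
  • rbf: K(x, x') = exp(-gamma * ||x - x'||^2)
  • sigmoid: K(x, x') = tanh(gamma * (x · x') + r)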

# Linear kernel
def test_SVR_linear(*data):
    X_train,X_test,y_train,y_test=data
    regr=svm.SVR(kernel='linear')
    regr.fit(X_train,y_train)
    print('Coefficients:%s,\nintercept %s'%(regr.coef_,regr.intercept_))
    print('Score:%.2f'%regr.score(X_test,y_test))

X_train,X_test,y_train,y_test=load_data_regression()
test_SVR_linear(X_train,X_test,y_train,y_test)
# Polynomial kernel
def test_SVR_poly(*data):
    X_train,X_test,y_train,y_test=data
    fig=plt.figure()

    # Test degree (coef0 fixed at r=1)
    degrees=range(1,20)
    train_scores=[]
    test_scores=[]
    for degree in degrees:
        regr=svm.SVR(kernel='poly',degree=degree,coef0=1)
        regr.fit(X_train,y_train)
        train_scores.append(regr.score(X_train,y_train))
        test_scores.append(regr.score(X_test,y_test))
    ax=fig.add_subplot(1,3,1)
    ax.plot(degrees,train_scores,label="Training score",marker='x')
    ax.plot(degrees,test_scores,label='Testing score',marker='o')
    ax.set_title('SVR_poly_degree r=1')
    ax.set_xlabel('p')
    ax.set_ylabel('score')
    ax.set_ylim(-1,1.)
    ax.legend(loc='best',framealpha=0.5)

    # Test gamma (degree=3, r=1 fixed)
    gammas=range(1,40)
    train_scores=[]
    test_scores=[]
    for gamma in gammas:
        regr=svm.SVR(kernel='poly',gamma=gamma,degree=3,coef0=1)
        regr.fit(X_train,y_train)
        train_scores.append(regr.score(X_train,y_train))
        test_scores.append(regr.score(X_test,y_test))
    ax=fig.add_subplot(1,3,2)
    ax.plot(gammas,train_scores,label='Training score',marker='+')
    ax.plot(gammas,test_scores,label='Testing score',marker='o')
    ax.set_title('SVR_poly_gamma r=1')
    ax.set_xlabel(r'$\gamma$')
    ax.set_ylabel('score')
    ax.set_ylim(-1,1)
    ax.legend(loc='best',framealpha=0.5)

    # Test r (gamma=10, degree=3 fixed)
    rs=range(20)
    train_scores=[]
    test_scores=[]
    for r in rs:
        regr=svm.SVR(kernel='poly',gamma=10,degree=3,coef0=r)
        regr.fit(X_train,y_train)
        train_scores.append(regr.score(X_train,y_train))
        test_scores.append(regr.score(X_test,y_test))
    ax=fig.add_subplot(1,3,3)
    ax.plot(rs,train_scores,label="Training score",marker='+')
    ax.plot(rs,test_scores,label='Testing score',marker='o')
    ax.set_title('SVR_poly_r gamma=10 degree=3')
    ax.set_xlabel(r'r')
    ax.set_ylabel('score')
    ax.set_ylim(-1,1.)
    ax.legend(loc='best',framealpha=0.5)
    plt.show()

test_SVR_poly(X_train,X_test,y_train,y_test)
# RBF (Gaussian) kernel
def test_SVR_rbf(*data):
    X_train,X_test,y_train,y_test=data
    gammas=range(1,20)
    train_scores=[]
    test_scores=[]
    for gamma in gammas:
        regr=svm.SVR(kernel='rbf',gamma=gamma)  # SVR, not SVC: this is a regression task
        regr.fit(X_train,y_train)
        train_scores.append(regr.score(X_train,y_train))
        test_scores.append(regr.score(X_test,y_test))
    fig=plt.figure()
    ax=fig.add_subplot(1,1,1)
    ax.plot(gammas,train_scores,label="Training score",marker='+')
    ax.plot(gammas,test_scores,label='Testing score',marker='o')
    ax.set_title('SVR_rbf')
    ax.set_xlabel(r'$\gamma$')
    ax.set_ylabel('score')
    ax.set_ylim(-1,1.)
    ax.legend(loc='best',framealpha=0.5)
    plt.show()

test_SVR_rbf(X_train,X_test,y_train,y_test)
# Sigmoid kernel
def test_SVR_sigmoid(*data):
    X_train,X_test,y_train,y_test=data
    fig=plt.figure()

    # Test gamma (r=0.01 fixed)
    gammas=np.logspace(-1,3)
    train_scores=[]
    test_scores=[]
    for gamma in gammas:
        regr=svm.SVR(kernel='sigmoid',gamma=gamma,coef0=0.01)
        regr.fit(X_train,y_train)
        train_scores.append(regr.score(X_train,y_train))
        test_scores.append(regr.score(X_test,y_test))
    ax=fig.add_subplot(1,2,1)
    ax.plot(gammas,train_scores,label='Training score',marker='+')
    ax.plot(gammas,test_scores,label='Testing score',marker='o')
    ax.set_title('SVR_sigmoid_gammas r=0.01')
    ax.set_xscale('log')
    ax.set_xlabel(r'$\gamma$')
    ax.set_ylabel('score')
    ax.set_ylim(-1,1.)
    ax.legend(loc='best',framealpha=0.5)

    # Test r (gamma=10 fixed)
    rs=np.linspace(0,5)
    train_scores=[]
    test_scores=[]
    for r in rs:
        regr=svm.SVR(kernel='sigmoid',coef0=r,gamma=10)
        regr.fit(X_train,y_train)
        train_scores.append(regr.score(X_train,y_train))
        test_scores.append(regr.score(X_test,y_test))
    ax=fig.add_subplot(1,2,2)
    ax.plot(rs,train_scores,label="Training score",marker='+')
    ax.plot(rs,test_scores,label='Testing score',marker='o')
    ax.set_title('SVR_sigmoid_r gamma=10')
    ax.set_xlabel(r'r')
    ax.set_ylabel('score')
    ax.set_ylim(-1,1.)
    ax.legend(loc='best',framealpha=0.5)
    plt.show()

test_SVR_sigmoid(X_train,X_test,y_train,y_test)
