机器学习之KNN算法,朴素贝叶斯,决策树,SVM算法比较

KNN算法

knn = KNeighborsClassifier ( )

朴素贝叶斯

gnb = GaussianNB ( )

决策树

dtc = DecisionTreeClassifier ( )

SVM算法

svm = SVC ()

代码:

import numpy as np
import matplotlib.pyplot as plt
from itertools import product
from sklearn import datasets
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC

# Load the wine dataset and keep only two features (columns 0 and 2)
# so the decision boundaries can be drawn in 2-D.
wine = datasets.load_wine()
x_train = wine.data[:, [0, 2]]
y_train = wine.target

# One classifier per algorithm, all with default hyper-parameters.
# The label doubles as the subplot title and the printed name.
classifiers = [
    ('KNN', KNeighborsClassifier()),
    ('GaussianNB', GaussianNB()),
    ('Decision Tree', DecisionTreeClassifier()),
    ('Support Vector Machine', SVC()),
]

# Fit each model, then report its accuracy on the training data
# (note: no held-out test split is used here).
for label, model in classifiers:
    model.fit(x_train, y_train)
for label, model in classifiers:
    print(f'{label}:', model.score(x_train, y_train))

# Plotting range: the observed feature range padded by 1 on each side.
x_min, x_max = x_train[:, 0].min() - 1, x_train[:, 0].max() + 1
y_min, y_max = x_train[:, 1].min() - 1, x_train[:, 1].max() + 1

# Dense grid of query points covering that range (step 0.1).
grid_x, grid_y = np.meshgrid(np.arange(x_min, x_max, 0.1),
                             np.arange(y_min, y_max, 0.1))

# 2x2 figure, one subplot per classifier, with shared axes.
fig, axes = plt.subplots(2, 2, sharex='col', sharey='row', figsize=(10, 8))

# Classify every grid point, shade the resulting regions, and overlay
# the training samples colored by their true class.
for (row, col), (label, model) in zip(product(range(2), range(2)), classifiers):
    preds = model.predict(np.c_[grid_x.ravel(), grid_y.ravel()])
    preds = preds.reshape(grid_x.shape)
    ax = axes[row, col]
    ax.contourf(grid_x, grid_y, preds, alpha=0.4)
    ax.scatter(x_train[:, 0], x_train[:, 1], c=y_train, s=20, edgecolor='k')
    ax.set_title(label)
plt.show()

结果:

机器学习之KNN算法,朴素贝叶斯,决策树,SVM算法比较_第1张图片

你可能感兴趣的:(Python,机器学习,python,人工智能,机器学习,sklearn,神经网络)