So far, KNN and GBRT appear to give the best results.
Asides: assorted Python knowledge points (a small combined example follows the reference link below)
1. Custom functions: end the function with a return statement so the result can be picked up by the caller.
2. np.arange: produces evenly spaced values (an arithmetic sequence).
3. f-string: Python string formatting has gone from %-formatting to str.format to f-strings; each step is more readable, and f-strings also appear to be faster than the other two.
Reference: https://www.cnblogs.com/traditional/p/9445930.html
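A minimal illustration of the three points above (the toy function and values are chosen purely for demonstration, not taken from the script below):

import numpy as np

# Knowledge point 1: a custom function should end with `return` so the caller gets the result
def double(x):
    return 2 * x

# Knowledge point 2: np.arange produces evenly spaced values (an arithmetic sequence)
xs = np.arange(0, 10, 2)  # array([0, 2, 4, 6, 8])

# Knowledge point 3: the same message formatted three ways; the f-string reads most directly
name, score = 'knn', 0.95
print('method: %s, score: %.2f' % (name, score))         # %-formatting
print('method: {}, score: {:.2f}'.format(name, score))   # str.format
print(f'method: {name}, score: {score:.2f}')              # f-string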
# Regression
import numpy as np
import matplotlib.pyplot as plt
from sklearn import tree
from sklearn.linear_model import LinearRegression
from sklearn import svm
from sklearn import neighbors
from sklearn import ensemble
from sklearn.tree import ExtraTreeRegressor
def gen_data(x1,x2):
    # np.random.random() (no argument) returns a scalar float, so y stays a plain number
    y = np.sin(x1)*0.5 + np.cos(x2)*0.5 + 0.1*x1 + np.random.random()
    return y  # knowledge point 1
x1_train = np.linspace(0,50,100)
x2_train = np.linspace(-10,10,100)
data_train = np.array([[x1,x2,gen_data(x1,x2)] for x1,x2 in zip(x1_train,x2_train)])
x1_test = np.linspace(0,50,100)+np.random.random(100)*0.4
x2_test = np.linspace(-10,10,100)+np.random.random(100)*0.8
data_test = np.array([[x1,x2,gen_data(x1,x2)] for x1,x2 in zip(x1_test,x2_test)])
def try_different_methods(model,method):
    # fit on the training set, then score and visualize predictions on the test set
    model.fit(data_train[:,0:2],data_train[:,2])
    score = model.score(data_test[:,0:2],data_test[:,2])
    result = model.predict(data_test[:,0:2])
    plt.figure()
    plt.plot(np.arange(len(result)),data_test[:,2],'go-',label='true value')  # knowledge point 2
    plt.plot(np.arange(len(result)),result,'ro-',label='predicted value')
    plt.title(f'method:{method}, score:{score:.4f}')  # knowledge point 3
    plt.legend(loc='best')
    plt.show()
model_decision_tree_regression = tree.DecisionTreeRegressor()
model_linear_regression = LinearRegression()
model_svm = svm.SVR()
model_k_neighbor = neighbors.KNeighborsRegressor()
model_random_forest_regressor = ensemble.RandomForestRegressor(n_estimators=20)  # 20 trees
model_adaboost_regressor = ensemble.AdaBoostRegressor(n_estimators=50)  # 50 boosting rounds (decision trees by default)
model_gradient_boosting_regressor = ensemble.GradientBoostingRegressor(n_estimators=100)  # 100 boosting stages
model_bagging_regressor = ensemble.BaggingRegressor()
model_extra_tree_regressor = ExtraTreeRegressor()
try_different_methods(model_decision_tree_regression,'DecisionTree')
try_different_methods(model_linear_regression,'LR')
try_different_methods(model_svm,'svm')
try_different_methods(model_k_neighbor,'knn')
try_different_methods(model_random_forest_regressor,'rf')
try_different_methods(model_adaboost_regressor,'adaboost')
try_different_methods(model_gradient_boosting_regressor,'gbrt')
try_different_methods(model_bagging_regressor,'bagging')
try_different_methods(model_extra_tree_regressor,'ExtraTree')
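To back up the opening claim that KNN and GBRT come out on top, one way to compare the models numerically instead of reading scores off the plot titles is to collect the R^2 values in one place. This is a sketch, not part of the original script: the models dict and the sorted printout are additions for illustration, and it assumes the estimators and data arrays defined above are still in scope.

# sketch: refit each estimator and collect its test-set R^2 score
models = {
    'DecisionTree': model_decision_tree_regression,
    'LR': model_linear_regression,
    'svm': model_svm,
    'knn': model_k_neighbor,
    'rf': model_random_forest_regressor,
    'adaboost': model_adaboost_regressor,
    'gbrt': model_gradient_boosting_regressor,
    'bagging': model_bagging_regressor,
    'ExtraTree': model_extra_tree_regressor,
}

scores = {}
for name, model in models.items():
    model.fit(data_train[:,0:2], data_train[:,2])
    scores[name] = model.score(data_test[:,0:2], data_test[:,2])  # R^2 on the test set

# print models from best to worst R^2
for name, s in sorted(scores.items(), key=lambda kv: kv[1], reverse=True):
    print(f'{name}: {s:.4f}')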