多项式回归（Polynomial Regression）示例

import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error

# Sample points lying (almost exactly) on the parabola y = x^2.
data = np.array([
    [-3, 9],
    [-2.5, 6.25],
    [-2, 4],
    [-1.5, 2.25],
    [-1, 1],
    [-0.5, 0.25],
    [-0.5, 0.25],
    [1, 1.01],
    [1.1, 1.21],
    [1.5, 2.25],
    [2, 4],
    [2.5, 6.25],
    [3, 9],
])
m = data.shape[0]  # number of samples (rows) -> 13
print(data.shape)  # (rows, cols) -> (13, 2)

# Split into a feature column X and a target column y, each shaped (n, 1)
# as required by scikit-learn estimators.
X = data[:, 0].reshape(-1, 1)
y = data[:, 1].reshape(-1, 1)

print("X=", X)
print("y=", y)

# Scatter-plot the raw samples.
plt.plot(X, y, "b.")
plt.xlabel('X')
plt.ylabel('y')
plt.show()

# plt.savefig('regu-2.png', dpi=200) #保存为图片

# Expand the single feature x into the polynomial terms [x, x^2].
# degree=2: quadratic expansion; include_bias=False: no constant column,
# since LinearRegression fits its own intercept.
poly_features = PolynomialFeatures(degree=2, include_bias=False)
X_poly = poly_features.fit_transform(X)
print("X_poly", X_poly, sep="\n")

# Ordinary least-squares fit on the expanded features.
lin_reg = LinearRegression()
lin_reg.fit(X_poly, y)
# Prints the learned intercept and coefficients for [x, x^2].
print(lin_reg.intercept_, lin_reg.coef_)

# Plot the fitted curve over a dense grid of x values.
X_plot = np.linspace(-3, 3, 1000).reshape(-1, 1)  # 1000 points in [-3, 3]
# Fix: use transform(), not fit_transform(). The transformer was already
# fitted on the training data; refitting at inference time is a misleading
# anti-pattern (even though PolynomialFeatures gives the same result here).
X_plot_poly = poly_features.transform(X_plot)
print("X_plot_poly")
print(X_plot_poly)

# Manual prediction: X_plot_poly @ coef^T + intercept
# (equivalent to lin_reg.predict(X_plot_poly)).
y_plot = np.dot(X_plot_poly, lin_reg.coef_.T) + lin_reg.intercept_
plt.plot(X_plot, y_plot, 'r-')   # fitted curve
plt.plot(X, y, 'b.')             # original samples
plt.show()

# Evaluate the training error with mean squared error.
# h = predictions on the training set (same formula as lin_reg.predict).
h = np.dot(X_poly, lin_reg.coef_.T) + lin_reg.intercept_
print(mean_squared_error(h, y))  # e.g. 0.0004398099539114421

# Predict the target for a new input x = 1.6.
testData = [[1.6]]
# Fix: use transform(), not fit_transform() — the transformer is already
# fitted and must not be refitted on inference data.
testData_poly = poly_features.transform(testData)
print("testData_poly")
print(testData_poly)
# Bug fix: feed the computed polynomial expansion to predict() instead of
# the hard-coded [[1.6, 2.56]] (2.56 is not exactly 1.6**2 in floating
# point, and the hard-coding silently ignored testData_poly above).
print(lin_reg.predict(testData_poly))  # predicted y for x = 1.6

你可能感兴趣的：机器学习、人工智能、Python、数据结构与算法