梯度下降法--多元线性回归

import numpy as np
import matplotlib.pyplot as plt  
from mpl_toolkits.mplot3d import Axes3D    #画3d图像

#读入数据、切分数据
# Load the dataset and split it into features / target.
# Delivery.csv is expected to have two feature columns (miles, number of
# deliveries) followed by one target column (delivery time).
data = np.genfromtxt('Delivery.csv',delimiter=',') 
x_data = data[:,:-1] # all columns except the last: the feature matrix
y_data = data[:,-1]  # last column only: the target vector

# Hyperparameters for gradient descent.
lr = 0.0001          # learning rate
theta0 = 0           # intercept, initialised to zero
theta1 = 0           # weight for feature 0 (miles)
theta2 = 0           # weight for feature 1 (deliveries)
epochs=1000          # number of full passes over the data

#最小二乘法
def compute_error(theta0, theta1, theta2, x_data, y_data):
    """Return the mean squared error of the linear model on the data.

    Args:
        theta0: intercept term.
        theta1: coefficient for the first feature column ``x_data[:, 0]``.
        theta2: coefficient for the second feature column ``x_data[:, 1]``.
        x_data: 2-D array of shape (n_samples, 2) — assumes exactly two
            feature columns, matching the rest of this script.
        y_data: 1-D array of n_samples target values.

    Returns:
        Mean of the squared residuals, (1/n) * sum((y - y_hat)**2).
    """
    # Vectorized prediction and residual — one numpy expression replaces
    # the per-row Python loop (same math, runs at C speed).
    predictions = theta0 + theta1 * x_data[:, 0] + theta2 * x_data[:, 1]
    return np.mean((y_data - predictions) ** 2)

#梯度下降
def gradient_descent_runner(x_data, y_data, theta0, theta1, theta2, lr, epochs):
    """Fit a 2-feature linear model by batch gradient descent.

    Args:
        x_data: 2-D array of shape (n_samples, 2) of features.
        y_data: 1-D array of n_samples target values.
        theta0, theta1, theta2: initial intercept and coefficients.
        lr: learning rate (step size).
        epochs: number of full passes over the data.

    Returns:
        Tuple ``(theta0, theta1, theta2)`` after ``epochs`` updates.
    """
    # Total number of samples, as a float so the 1/m averaging below
    # stays in floating point.
    m = float(len(x_data))
    for _ in range(epochs):
        theta0_grad = 0
        theta1_grad = 0
        theta2_grad = 0
        # Accumulate the averaged gradient of the MSE over all samples.
        for j in range(len(x_data)):
            # Residual for sample j — hoisted so it is computed once
            # instead of three times (once per gradient term).
            residual = y_data[j] - (theta0 + theta1 * x_data[j, 0] + theta2 * x_data[j, 1])
            theta0_grad += -(1 / m) * residual
            theta1_grad += -(1 / m) * x_data[j, 0] * residual
            theta2_grad += -(1 / m) * x_data[j, 1] * residual
        # Batch update: step each parameter against its gradient.
        theta0 = theta0 - (lr * theta0_grad)
        theta1 = theta1 - (lr * theta1_grad)
        theta2 = theta2 - (lr * theta2_grad)
    return theta0, theta1, theta2

# Report the initial error, run gradient descent, then report the result.
print("Starting theta0 = {0}, theta1 = {1}, theta2 = {2}, error = {3}".
      format(theta0, theta1, theta2, compute_error(theta0, theta1, theta2, x_data, y_data)))
print("Running...")
theta0, theta1, theta2 = gradient_descent_runner(x_data, y_data, theta0, theta1, theta2, lr, epochs)
print("After {0} iterations theta0 = {1}, theta1 = {2}, theta2 = {3}, error = {4}".
      format(epochs, theta0, theta1, theta2, compute_error(theta0, theta1, theta2, x_data, y_data)))

# Visualise the data points and the fitted plane in 3D.
# 111 means: split the canvas into a 1x1 grid and draw in cell 1.
ax = plt.figure().add_subplot(111, projection = '3d')
# Scatter the raw samples as red circles (marker='o').
ax.scatter(x_data[:,0], x_data[:,1], y_data, c = 'r', marker = 'o', s = 100)
x0 = x_data[:,0]
x1 = x_data[:,1]
# Build a coordinate grid over the feature ranges for the surface plot.
x0, x1 = np.meshgrid(x0, x1)
# Evaluate the fitted plane z = theta0 + theta1*x0 + theta2*x1 on the grid.
z = theta0 + x0*theta1 + x1*theta2
# Draw the regression plane.
ax.plot_surface(x0, x1, z)
# Axis labels.
ax.set_xlabel('Miles')  
ax.set_ylabel('Num of Deliveries')  
ax.set_zlabel('Time')  
  
# Show the figure.
plt.show()  

解释:

np.meshgrid(x0, x1)

例:x0=[1,2,3],x1=[4,5,6]

结果是由这两个向量生成的 9 个网格坐标点:

(1,4), (1,5), (1,6)
(2,4), (2,5), (2,6)
(3,4), (3,5), (3,6)

 

你可能感兴趣的:(Machine,Learning)