Hung-yi Lee Machine Learning: Regression Experiment

import numpy as np
import matplotlib.pyplot as plt
from pylab import mpl

# matplotlib has no Chinese font configured by default; set one at runtime
plt.rcParams['font.sans-serif'] = ['SimHei']  # render Chinese characters in figures
mpl.rcParams['axes.unicode_minus'] = False  # keep the minus sign from showing as a box in saved figures
x_data = [338., 333., 328., 207., 226., 25., 179., 60., 208., 606.]
y_data = [640., 633., 619., 393., 428., 27., 193., 66., 226., 1591.]
x_d = np.asarray(x_data)
y_d = np.asarray(y_data)
x = np.arange(-200, -100, 1)
y = np.arange(-5, 5, 0.1)
Z = np.zeros((len(y), len(x)))  # loss surface: rows follow y (w), columns follow x (b)
X, Y = np.meshgrid(x, y)
# loss
for i in range(len(x)):
    for j in range(len(y)):
        b = x[i]
        w = y[j]
        Z[j][i] = 0  # meshgrid convention: y varies along rows, x along columns
        for n in range(len(x_data)):
            Z[j][i] += (y_data[n] - b - w * x_data[n]) ** 2
        Z[j][i] /= len(x_data)
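The double loop above is easy to follow but slow. As a side note (not part of the original post), the same surface could also be built with NumPy broadcasting; this minimal sketch assumes the x_d, y_d, X, Y arrays defined above, and the names residuals / Z_vec are only illustrative:

residuals = y_d[None, None, :] - X[:, :, None] - Y[:, :, None] * x_d[None, None, :]  # shape (len(y), len(x), len(x_data))
Z_vec = (residuals ** 2).mean(axis=-1)  # mean squared error for every (b, w) grid cell
# Z_vec should match Z up to floating-point error: np.allclose(Z_vec, Z)
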
# linear regression
#b = -120
#w = -4
b = 0.01
w = 0.01
lr = 0.000005
iteration = 1400000

b_history = [b]
w_history = [w]
loss_history = []
import time
start = time.time()
for i in range(iteration):
    m = float(len(x_d))
    y_hat = w * x_d + b
    loss = np.dot(y_d - y_hat, y_d - y_hat) / m
    grad_b = -2.0 * np.sum(y_d - y_hat) / m
    grad_w = -2.0 * np.dot(y_d - y_hat, x_d) / m
    # update param
    b -= lr * grad_b
    w -= lr * grad_w

    b_history.append(b)
    w_history.append(w)
    loss_history.append(loss)
    if i % 10000 == 0:
        print("Step %i, w: %0.4f, b: %.4f, Loss: %.4f" % (i, w, b, loss))
end = time.time()
print("大约需要时间:",end-start)
Step 0, w: 1.8598, b: 0.0148, Loss: 411859.4404
Step 10000, w: 2.1428, b: -5.1635, Loss: 19553.8375
Step 20000, w: 2.1572, b: -10.2003, Loss: 19046.4218
Step 30000, w: 2.1713, b: -15.0986, Loss: 18566.5132
Step 40000, w: 2.1850, b: -19.8623, Loss: 18112.6205
Step 50000, w: 2.1983, b: -24.4951, Loss: 17683.3334
Step 60000, w: 2.2113, b: -29.0005, Loss: 17277.3180
Step 70000, w: 2.2239, b: -33.3822, Loss: 16893.3127
Step 80000, w: 2.2361, b: -37.6434, Loss: 16530.1244
Step 90000, w: 2.2480, b: -41.7875, Loss: 16186.6246
Step 100000, w: 2.2596, b: -45.8177, Loss: 15861.7459
Step 110000, w: 2.2709, b: -49.7372, Loss: 15554.4790
Step 120000, w: 2.2818, b: -53.5489, Loss: 15263.8691
Step 130000, w: 2.2925, b: -57.2559, Loss: 14989.0131
Step 140000, w: 2.3028, b: -60.8610, Loss: 14729.0571
Step 150000, w: 2.3129, b: -64.3671, Loss: 14483.1934
Step 160000, w: 2.3227, b: -67.7767, Loss: 14250.6580
Step 170000, w: 2.3322, b: -71.0927, Loss: 14030.7283
Step 180000, w: 2.3415, b: -74.3175, Loss: 13822.7211
Step 190000, w: 2.3505, b: -77.4537, Loss: 13625.9899
Step 200000, w: 2.3593, b: -80.5038, Loss: 13439.9236
Step 210000, w: 2.3678, b: -83.4700, Loss: 13263.9439
Step 220000, w: 2.3761, b: -86.3546, Loss: 13097.5042
Step 230000, w: 2.3842, b: -89.1600, Loss: 12940.0871
Step 240000, w: 2.3920, b: -91.8883, Loss: 12791.2037
Step 250000, w: 2.3996, b: -94.5416, Loss: 12650.3912
Step 260000, w: 2.4070, b: -97.1220, Loss: 12517.2122
Step 270000, w: 2.4143, b: -99.6315, Loss: 12391.2529
Step 280000, w: 2.4213, b: -102.0720, Loss: 12272.1218
Step 290000, w: 2.4281, b: -104.4455, Loss: 12159.4488
Step 300000, w: 2.4347, b: -106.7537, Loss: 12052.8839
Step 310000, w: 2.4412, b: -108.9984, Loss: 11952.0958
Step 320000, w: 2.4474, b: -111.1815, Loss: 11856.7715
Step 330000, w: 2.4535, b: -113.3046, Loss: 11766.6147
Step 340000, w: 2.4595, b: -115.3694, Loss: 11681.3454
Step 350000, w: 2.4653, b: -117.3773, Loss: 11600.6985
Step 360000, w: 2.4709, b: -119.3302, Loss: 11524.4234
Step 370000, w: 2.4763, b: -121.2293, Loss: 11452.2833
Step 380000, w: 2.4816, b: -123.0762, Loss: 11384.0539
Step 390000, w: 2.4868, b: -124.8724, Loss: 11319.5232
Step 400000, w: 2.4918, b: -126.6192, Loss: 11258.4907
Step 410000, w: 2.4967, b: -128.3181, Loss: 11200.7668
Step 420000, w: 2.5014, b: -129.9702, Loss: 11146.1721
Step 430000, w: 2.5061, b: -131.5769, Loss: 11094.5370
Step 440000, w: 2.5106, b: -133.1395, Loss: 11045.7011
Step 450000, w: 2.5149, b: -134.6591, Loss: 10999.5125
Step 460000, w: 2.5192, b: -136.1370, Loss: 10955.8279
Step 470000, w: 2.5233, b: -137.5742, Loss: 10914.5114
Step 480000, w: 2.5273, b: -138.9719, Loss: 10875.4346
Step 490000, w: 2.5312, b: -140.3313, Loss: 10838.4762
Step 500000, w: 2.5350, b: -141.6532, Loss: 10803.5214
Step 510000, w: 2.5387, b: -142.9389, Loss: 10770.4614
Step 520000, w: 2.5423, b: -144.1892, Loss: 10739.1936
Step 530000, w: 2.5458, b: -145.4051, Loss: 10709.6209
Step 540000, w: 2.5492, b: -146.5877, Loss: 10681.6513
Step 550000, w: 2.5525, b: -147.7377, Loss: 10655.1979
Step 560000, w: 2.5557, b: -148.8561, Loss: 10630.1786
Step 570000, w: 2.5588, b: -149.9438, Loss: 10606.5156
Step 580000, w: 2.5619, b: -151.0016, Loss: 10584.1353
Step 590000, w: 2.5648, b: -152.0303, Loss: 10562.9683
Step 600000, w: 2.5677, b: -153.0308, Loss: 10542.9488
Step 610000, w: 2.5705, b: -154.0037, Loss: 10524.0145
Step 620000, w: 2.5732, b: -154.9499, Loss: 10506.1066
Step 630000, w: 2.5759, b: -155.8701, Loss: 10489.1696
Step 640000, w: 2.5784, b: -156.7651, Loss: 10473.1507
Step 650000, w: 2.5809, b: -157.6354, Loss: 10458.0002
Step 660000, w: 2.5834, b: -158.4818, Loss: 10443.6709
Step 670000, w: 2.5857, b: -159.3049, Loss: 10430.1185
Step 680000, w: 2.5880, b: -160.1055, Loss: 10417.3008
Step 690000, w: 2.5903, b: -160.8840, Loss: 10405.1779
Step 700000, w: 2.5925, b: -161.6411, Loss: 10393.7122
Step 710000, w: 2.5946, b: -162.3774, Loss: 10382.8680
Step 720000, w: 2.5966, b: -163.0935, Loss: 10372.6118
Step 730000, w: 2.5986, b: -163.7899, Loss: 10362.9115
Step 740000, w: 2.6006, b: -164.4672, Loss: 10353.7370
Step 750000, w: 2.6025, b: -165.1258, Loss: 10345.0599
Step 760000, w: 2.6043, b: -165.7664, Loss: 10336.8532
Step 770000, w: 2.6061, b: -166.3893, Loss: 10329.0914
Step 780000, w: 2.6078, b: -166.9952, Loss: 10321.7504
Step 790000, w: 2.6095, b: -167.5843, Loss: 10314.8073
Step 800000, w: 2.6112, b: -168.1573, Loss: 10308.2406
Step 810000, w: 2.6128, b: -168.7146, Loss: 10302.0298
Step 820000, w: 2.6143, b: -169.2565, Loss: 10296.1558
Step 830000, w: 2.6159, b: -169.7835, Loss: 10290.6002
Step 840000, w: 2.6173, b: -170.2960, Loss: 10285.3458
Step 850000, w: 2.6188, b: -170.7945, Loss: 10280.3762
Step 860000, w: 2.6202, b: -171.2793, Loss: 10275.6760
Step 870000, w: 2.6215, b: -171.7507, Loss: 10271.2306
Step 880000, w: 2.6228, b: -172.2092, Loss: 10267.0262
Step 890000, w: 2.6241, b: -172.6551, Loss: 10263.0497
Step 900000, w: 2.6254, b: -173.0887, Loss: 10259.2888
Step 910000, w: 2.6266, b: -173.5104, Loss: 10255.7317
Step 920000, w: 2.6277, b: -173.9205, Loss: 10252.3675
Step 930000, w: 2.6289, b: -174.3194, Loss: 10249.1857
Step 940000, w: 2.6300, b: -174.7072, Loss: 10246.1763
Step 950000, w: 2.6311, b: -175.0845, Loss: 10243.3301
Step 960000, w: 2.6321, b: -175.4513, Loss: 10240.6382
Step 970000, w: 2.6332, b: -175.8081, Loss: 10238.0922
Step 980000, w: 2.6342, b: -176.1551, Loss: 10235.6842
Step 990000, w: 2.6351, b: -176.4925, Loss: 10233.4068
Step 1000000, w: 2.6361, b: -176.8207, Loss: 10231.2528
Step 1010000, w: 2.6370, b: -177.1398, Loss: 10229.2156
Step 1020000, w: 2.6379, b: -177.4502, Loss: 10227.2889
Step 1030000, w: 2.6388, b: -177.7520, Loss: 10225.4665
Step 1040000, w: 2.6396, b: -178.0456, Loss: 10223.7430
Step 1050000, w: 2.6404, b: -178.3311, Loss: 10222.1129
Step 1060000, w: 2.6412, b: -178.6087, Loss: 10220.5712
Step 1070000, w: 2.6420, b: -178.8787, Loss: 10219.1130
Step 1080000, w: 2.6428, b: -179.1413, Loss: 10217.7339
Step 1090000, w: 2.6435, b: -179.3967, Loss: 10216.4296
Step 1100000, w: 2.6442, b: -179.6450, Loss: 10215.1959
Step 1110000, w: 2.6449, b: -179.8865, Loss: 10214.0292
Step 1120000, w: 2.6456, b: -180.1214, Loss: 10212.9257
Step 1130000, w: 2.6462, b: -180.3498, Loss: 10211.8820
Step 1140000, w: 2.6469, b: -180.5720, Loss: 10210.8949
Step 1150000, w: 2.6475, b: -180.7880, Loss: 10209.9613
Step 1160000, w: 2.6481, b: -180.9982, Loss: 10209.0783
Step 1170000, w: 2.6487, b: -181.2025, Loss: 10208.2431
Step 1180000, w: 2.6492, b: -181.4012, Loss: 10207.4533
Step 1190000, w: 2.6498, b: -181.5945, Loss: 10206.7063
Step 1200000, w: 2.6503, b: -181.7824, Loss: 10205.9997
Step 1210000, w: 2.6509, b: -181.9652, Loss: 10205.3315
Step 1220000, w: 2.6514, b: -182.1430, Loss: 10204.6995
Step 1230000, w: 2.6519, b: -182.3158, Loss: 10204.1017
Step 1240000, w: 2.6524, b: -182.4839, Loss: 10203.5364
Step 1250000, w: 2.6528, b: -182.6474, Loss: 10203.0017
Step 1260000, w: 2.6533, b: -182.8065, Loss: 10202.4960
Step 1270000, w: 2.6537, b: -182.9611, Loss: 10202.0177
Step 1280000, w: 2.6542, b: -183.1115, Loss: 10201.5653
Step 1290000, w: 2.6546, b: -183.2577, Loss: 10201.1375
Step 1300000, w: 2.6550, b: -183.4000, Loss: 10200.7328
Step 1310000, w: 2.6554, b: -183.5383, Loss: 10200.3501
Step 1320000, w: 2.6558, b: -183.6728, Loss: 10199.9881
Step 1330000, w: 2.6562, b: -183.8037, Loss: 10199.6458
Step 1340000, w: 2.6565, b: -183.9309, Loss: 10199.3220
Step 1350000, w: 2.6569, b: -184.0546, Loss: 10199.0158
Step 1360000, w: 2.6572, b: -184.1750, Loss: 10198.7261
Step 1370000, w: 2.6576, b: -184.2920, Loss: 10198.4522
Step 1380000, w: 2.6579, b: -184.4058, Loss: 10198.1931
Step 1390000, w: 2.6582, b: -184.5165, Loss: 10197.9481
Approximate elapsed time (s): 16.391692399978638
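After 1.4 million iterations with this single small learning rate, w and b are still creeping toward the minimum. As a cross-check (an addition, not in the original notebook), the exact least-squares optimum can be computed in closed form with np.linalg.lstsq; it should land near the point (b ≈ -188.4, w ≈ 2.67) marked by the orange cross in the plots below:

A = np.vstack([x_d, np.ones_like(x_d)]).T  # design matrix with columns [x, 1]
w_opt, b_opt = np.linalg.lstsq(A, y_d, rcond=None)[0]
print("closed-form optimum: w = %.4f, b = %.4f" % (w_opt, b_opt))
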
# plot the figure
plt.contourf(x, y, Z, 50, alpha=0.5, cmap=plt.get_cmap('jet'))  # filled contour plot of the loss surface
plt.plot([-188.4], [2.67], 'x', ms=12, mew=3, color="orange")
plt.plot(b_history, w_history, 'o-', ms=3, lw=1.5, color='black')
plt.xlim(-200, -100)
plt.ylim(-5, 5)
plt.xlabel(r'$b$')
plt.ylabel(r'$w$')
plt.title("线性回归")
plt.show()
[Figure output_5_0.png: gradient-descent path (black) over the loss contours; the orange cross marks (b = -188.4, w = 2.67)]
# use a separate adaptive learning rate for w and b (Adagrad)
# linear regression
b = -120
w = -4
lr = 1
iteration = 100000

b_history = [b]
w_history = [w]

lr_b = 0  # accumulated squared gradient for b (Adagrad)
lr_w = 0  # accumulated squared gradient for w (Adagrad)
import time
start = time.time()
for i in range(iteration):
    b_grad=0.0
    w_grad=0.0
    for n in range(len(x_data)):
        b_grad = b_grad - 2.0 * (y_data[n] - b - w * x_data[n]) * 1.0
        w_grad = w_grad - 2.0 * (y_data[n] - b - w * x_data[n]) * x_data[n]

    lr_b = lr_b + b_grad ** 2
    lr_w = lr_w + w_grad ** 2
    # update param: Adagrad divides the base lr by the root of the accumulated squared gradients
    b -= lr / np.sqrt(lr_b) * b_grad
    w -= lr / np.sqrt(lr_w) * w_grad

    b_history.append(b)
    w_history.append(w)
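Adagrad keeps a running sum of squared gradients for each parameter and divides the base learning rate by its square root, so a parameter whose gradients have been large (here w, which multiplies x) takes proportionally smaller steps; that is why this run can afford lr = 1 and only 100,000 iterations, versus 1,400,000 for the fixed-rate run above. For reference, a minimal vectorized sketch of one such step (an addition; it assumes the x_d and y_d arrays defined earlier and mirrors the loop above):

def adagrad_step(b, w, acc_b, acc_w, lr=1.0):
    # gradients of the summed squared error, as in the loop above (no division by the sample count)
    y_hat = w * x_d + b
    grad_b = -2.0 * np.sum(y_d - y_hat)
    grad_w = -2.0 * np.dot(y_d - y_hat, x_d)
    # accumulate squared gradients and scale each step by 1/sqrt of the running sum
    acc_b += grad_b ** 2
    acc_w += grad_w ** 2
    b -= lr / np.sqrt(acc_b) * grad_b
    w -= lr / np.sqrt(acc_w) * grad_w
    return b, w, acc_b, acc_w
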
# plot the figure
plt.contourf(x, y, Z, 50, alpha=0.5, cmap=plt.get_cmap('jet'))  # filled contour plot of the loss surface
plt.plot([-188.4], [2.67], 'x', ms=12, mew=3, color="orange")
plt.plot(b_history, w_history, 'o-', ms=3, lw=1.5, color='black')
plt.xlim(-200, -100)
plt.ylim(-5, 5)
plt.xlabel(r'$b$')
plt.ylabel(r'$w$')
plt.title("线性回归")
plt.show()
[Figure output_7_0.png: Adagrad path (black) over the loss contours; the orange cross marks (b = -188.4, w = 2.67)]
