Setting the Learning Rate

Let's start with the code:

from __future__ import print_function
import torch
import torch.nn as nn
import numpy as np
import matplotlib.pyplot as plt

import time
%matplotlib inline
from IPython import display
# Hyper-parameters
input_size = 1
output_size = 1
learning_rate = 0.001

def load_data(filename):
    xys=[]
    with open(filename,'r') as f:
        for line in f:
            xys.append(list(map(float, line.strip().split())))  # wrap in list() so the parsed values are concrete, not a one-shot iterator (Python 3)
        xs, ys = zip(*xys)  # unzip into separate x and y sequences
        return np.asarray(xs), np.asarray(ys)
x_t, y_t = load_data(r'train.txt')
# Reshape each sample into its own row so the arrays end up with shape (N, 1)
x = []
y = []
for i in range(len(x_t)):
    x.append([x_t[i]])
    y.append([y_t[i]])

x_train = np.array(x, dtype=np.float32)
y_train = np.array(y, dtype=np.float32)

plt.scatter(x_train, y_train)
plt.show()

(Figure: scatter plot of the training data)
class Model(nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        self.linear = nn.Linear(input_size, output_size) # One in and one out

    def forward(self, x):
        y_pred = self.linear(x)
        return y_pred
model = Model()
# model = nn.Linear(input_size, output_size)

criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.00001)  # note: much smaller than the learning_rate = 0.001 defined above; see the discussion below
for epoch in range(50):
    inputs=torch.from_numpy(x_train)
    targets=torch.from_numpy(y_train)
    
    outputs = model(inputs)
    loss = criterion(outputs, targets)

    # Zero gradients
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    
    # Every 5 epochs, redraw the current fit against the original data
    if (epoch+1) % 5 == 0:
        predicted = model(torch.from_numpy(x_train)).detach().numpy()
        plt.plot(x_train, y_train, 'ro', label='Original data')
        plt.plot(x_train, predicted, label='Fitted line')
        plt.legend()
        plt.show()
        display.clear_output(wait=True)  # clear the previous figure so the plot updates in place
        plt.pause(1)
(Figure: original data points and the fitted line after training)

I spent a whole day debugging this, and it all came down to the learning rate. I started with 0.001, which seemed small enough, but the plots showed the fitted line jumping up and down instead of settling. At first I suspected the data loading, yet everything behaved normally on random data. Only after turning the learning rate down further did the fitted line finally come out. First time I've run into this…
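
To make the effect easy to reproduce, here is a minimal sketch (my own addition, not part of the original script) that trains the same one-input linear model with a few learning rates and prints the final loss. It uses synthetic data with fairly large x values in place of train.txt, since inputs on that scale are exactly what makes a step size like 0.001 overshoot; the names x_demo and y_demo are just placeholders for illustration.

import numpy as np
import torch
import torch.nn as nn

# Synthetic stand-in for train.txt: y = 3x + 5 plus noise, with x up to 100
rng = np.random.RandomState(0)
x_demo = rng.uniform(0, 100, size=(50, 1)).astype(np.float32)
y_demo = (3.0 * x_demo + 5.0 + rng.randn(50, 1)).astype(np.float32)

inputs = torch.from_numpy(x_demo)
targets = torch.from_numpy(y_demo)

for lr in [1e-3, 1e-4, 1e-5]:
    model = nn.Linear(1, 1)
    criterion = nn.MSELoss()
    optimizer = torch.optim.SGD(model.parameters(), lr=lr)
    for epoch in range(50):
        optimizer.zero_grad()
        loss = criterion(model(inputs), targets)
        loss.backward()
        optimizer.step()
    # Print the last computed loss for this learning rate
    print('lr=%g  final loss=%.4g' % (lr, loss.item()))

With inputs on this scale, lr=1e-3 typically makes the loss grow (eventually nan), while the smaller rates shrink it, which matches what the plots above showed: the step size has to be matched to the magnitude of the inputs and gradients rather than chosen by gut feeling.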
