【深度学习】Pytorch学习笔记(二)

PyTorch搭建神经网络进行回归:

import torch
import imageio
import torch.nn.functional as F
import matplotlib.pyplot as plt

# Build a noisy 1-D regression dataset: 100 samples drawn uniformly from
# [0, 5), sorted so the fitted curve can be drawn as a line, with targets
# sin(x) plus outlier noise injected on every 5th point.
samples = 5 * torch.rand(100, 1)
x, _ = torch.sort(samples, dim=0)
y = torch.sin(x)
outlier_noise = 3 * (0.5 - torch.rand(20, 1))
y[::5, :] += outlier_noise

class Model(torch.nn.Module):
    """Fully-connected regression network with two hidden layers.

    Architecture: Linear -> ReLU -> Linear -> ReLU -> Linear.
    """

    def __init__(self, n_features, n1_hidden, n2_hidden, n_output):
        """Create the three linear layers of the network.

        n_features -- size of each input sample
        n1_hidden  -- width of the first hidden layer
        n2_hidden  -- width of the second hidden layer
        n_output   -- size of each output sample
        """
        super(Model, self).__init__()
        self.hidden_one = torch.nn.Linear(n_features, n1_hidden)
        self.hidden_two = torch.nn.Linear(n1_hidden, n2_hidden)
        self.out_layer = torch.nn.Linear(n2_hidden, n_output)

    def forward(self, x):
        """Run the forward pass and return the network's prediction."""
        h1 = torch.relu(self.hidden_one(x))
        h2 = torch.relu(self.hidden_two(h1))
        # No activation on the output layer: this is a regression head.
        return self.out_layer(h2)

model = Model(1, 50, 10, 1)
plt.ion()  # interactive mode so the figure updates during training

optimizer = torch.optim.Adam(model.parameters(), lr=1e-2)  # optimizer over all model parameters
loss_func = torch.nn.MSELoss()  # mean-squared-error loss for regression

for t in range(1000):
    predict = model(x)              # forward pass (NOT instantiation): compute predictions
    loss = loss_func(predict, y)    # compute the regression loss

    optimizer.zero_grad()   # clear gradients accumulated from the previous step
    loss.backward()         # backpropagate to compute parameter gradients
    optimizer.step()        # update parameters using the gradients

    # Redraw the fit every 5 steps to visualize training progress.
    if t % 5 == 0:
        plt.cla()
        plt.scatter(x.numpy(), y.numpy(), edgecolor='k', color='c', s=50)
        # Use detach() instead of the legacy .data attribute to get a
        # plain tensor outside the autograd graph before converting.
        plt.plot(x.numpy(), predict.detach().numpy(), color='c', lw=3.5)
        plt.pause(0.1)

plt.ioff()
plt.show()

代码参照B站莫烦视频稍作修改。

你可能感兴趣的:(深度学习,Pytorch)