PyTorch RNN (Recurrent Neural Network): Regression

This example trains a one-layer RNN to predict a cosine wave from a sine wave input, feeding the network one segment of TIME_STEP points at a time.

import torch
from torch import nn
import numpy as np
import matplotlib.pyplot as plt

torch.manual_seed(1)

TIME_STEP = 10
INPUT_SIZE = 1
LR = 0.02

# show data
# steps = np.linspace(0,np.pi*2,100,dtype=np.float32)
# x_np = np.sin(steps)
# y_np = np.cos(steps)
# plt.plot(steps,y_np,'r-',label='target (cos)')
# plt.plot(steps,x_np,'b-',label='input (sin)')
# plt.legend(loc='best')
# plt.show()

class RNN(nn.Module):
    def __init__(self):
        super(RNN,self).__init__()
        self.rnn = nn.RNN(
            input_size = INPUT_SIZE,
            hidden_size = 32,
            num_layers = 1,
            batch_first = True,
        )
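        # batch_first=True: input and output tensors are shaped (batch, time_step, feature)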
        self.out = nn.Linear(32,1)

    def forward(self,x,h_state):
        r_out,h_state = self.rnn(x,h_state)
        outs = [] # save all predictions
        for time_step in range(r_out.size(1)): # calculate output for each time step
            outs.append(self.out(r_out[:,time_step,:]))
        return torch.stack(outs,dim=1),h_state
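# Note: nn.Linear acts on the last dimension, so the per-time-step loop above is
# equivalent to calling self.out(r_out) on the whole sequence at once; the loop
# just makes the computation at each time step explicit.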

rnn = RNN()
print(rnn)

optimizer = torch.optim.Adam(rnn.parameters(),lr=LR) # optimize all rnn parameters
loss_func = nn.MSELoss()

h_state = None # for initial hidden state
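# when h_state is None, nn.RNN initializes the hidden state to zeros on the first call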

plt.figure(1,figsize=(12,5))
plt.ion() # continuously plot

for step in range(60):
    start,end = step * np.pi,(step+1)*np.pi # time range of this segment
    # use sin to predict cos
    steps = np.linspace(start,end,TIME_STEP,dtype=np.float32)
    x_np = np.sin(steps) # float32 so it converts to a torch FloatTensor
    y_np = np.cos(steps)

    x = torch.from_numpy(x_np[np.newaxis,:,np.newaxis]) # shape (batch,time_step,input_size)
    y = torch.from_numpy(y_np[np.newaxis,:,np.newaxis])

    prediction,h_state = rnn(x,h_state) # rnn output
    # !! next step is important !!
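    # (without detaching, backward() would try to propagate through the graphs of
    # earlier iterations, whose intermediate buffers have already been freed)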
    h_state = h_state.detach() # repack the hidden state, break the connection from the last iteration

    loss = loss_func(prediction,y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    plt.plot(steps,y_np.flatten(),'r-')
    plt.plot(steps,prediction.detach().numpy().flatten(),'b-')
    plt.draw()
    plt.pause(0.05)

plt.ioff()
plt.show()
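
After training, a quick sanity check is to push one more sine segment through the network and look at the shapes it returns. The snippet below is a minimal sketch reusing the rnn, TIME_STEP and imports defined above (the segment starting at 60*np.pi is simply the next one after the training loop); the printed shapes assume this post's hyperparameters (hidden_size=32, num_layers=1, a batch of 1).

with torch.no_grad(): # forward pass only, no gradients needed
    test_steps = np.linspace(60*np.pi,61*np.pi,TIME_STEP,dtype=np.float32)
    test_x = torch.from_numpy(np.sin(test_steps)[np.newaxis,:,np.newaxis])
    test_pred,test_h = rnn(test_x,None)
    print(test_pred.shape) # torch.Size([1, 10, 1])  -> (batch, time_step, 1)
    print(test_h.shape)    # torch.Size([1, 1, 32])  -> (num_layers, batch, hidden_size)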
