Purpose: an activation function is actually a very simple nonlinear function applied to the output of a linear layer. Once several neurons carrying such activations are combined, the network gains the powerful ability to fit complex nonlinear functions!
Commonly used activation functions include sigmoid, tanh, ReLU, and Maxout.
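To get a feel for their shapes, here is a minimal sketch (not part of the fitting example that follows) that plots three of them using PyTorch's built-in torch.sigmoid, torch.tanh, and torch.relu; Maxout is a learned activation without a fixed curve, so it is omitted:

import torch
import matplotlib.pyplot as plt

z = torch.linspace(-5, 5, 200)
# each built-in activation is applied elementwise to the same inputs
for name, fn in [('sigmoid', torch.sigmoid), ('tanh', torch.tanh), ('ReLU', torch.relu)]:
    plt.plot(z.numpy(), fn(z).numpy(), label=name)
plt.legend()
plt.show()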
In the example below, the ReLU activation is applied with torch.nn.functional.relu(), and the training data is a cubic curve perturbed with uniform noise from 0.3*torch.rand(x.size()). First, generate and visualize the data:
import torch
import matplotlib.pyplot as plt

x = torch.unsqueeze(torch.linspace(-3, 3, 10000), dim=1)  # 10000 points in [-3, 3], shape (10000, 1)
y = x.pow(3) + 0.3*torch.rand(x.size())                   # cubic target plus uniform noise in [0, 0.3)
plt.scatter(x.numpy(), y.numpy(), s=0.01)
plt.show()
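The unsqueeze(dim=1) call matters because nn.Linear expects a 2-D input of shape (batch, features); a quick illustrative check of the shapes:

import torch

t = torch.linspace(-3, 3, 5)            # 1-D tensor, not usable by nn.Linear
print(t.shape)                          # torch.Size([5])
print(torch.unsqueeze(t, dim=1).shape)  # torch.Size([5, 1]): 5 samples, 1 feature each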
The network has one hidden layer, self.hidden, and one output layer, self.out. Data leaving the hidden layer passes through the ReLU activation for the nonlinear processing, and then through the output layer.

from torch import nn, optim
import torch.nn.functional as F

class Net(nn.Module):
    def __init__(self, input_feature, num_hidden, outputs):
        super(Net, self).__init__()
        self.hidden = nn.Linear(input_feature, num_hidden)
        self.out = nn.Linear(num_hidden, outputs)

    def forward(self, x):
        x = F.relu(self.hidden(x))
        x = self.out(x)
        return x

net = Net(input_feature=1, num_hidden=20, outputs=1).cuda()
inputs = x.cuda()
target = y.cuda()
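Note that .cuda() assumes a GPU is available and will raise an error otherwise. A common device-agnostic alternative (a sketch, not the original code above) selects the device at runtime:

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = Net(input_feature=1, num_hidden=20, outputs=1).to(device)
inputs = x.to(device)
target = y.to(device)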
Putting it all together, the complete script, with a training loop and a live-plotting helper, is:

import torch
import matplotlib.pyplot as plt
from torch import nn, optim
import torch.nn.functional as F

x = torch.unsqueeze(torch.linspace(-3, 3, 10000), dim=1)
y = x.pow(3) + 0.3*torch.rand(x.size())
# plt.scatter(x.numpy(), y.numpy(), s=0.01)
# plt.show()

class Net(nn.Module):
    def __init__(self, input_feature, num_hidden, outputs):
        super(Net, self).__init__()
        self.hidden = nn.Linear(input_feature, num_hidden)
        self.out = nn.Linear(num_hidden, outputs)

    def forward(self, x):
        x = F.relu(self.hidden(x))
        x = self.out(x)
        return x

def train(model, criterion, optimizer, epochs):
    for epoch in range(epochs):
        output = model(inputs)            # forward pass
        loss = criterion(output, target)  # MSE against the noisy cubic
        optimizer.zero_grad()             # clear gradients from the previous step
        loss.backward()                   # backpropagate
        optimizer.step()                  # update the weights
        # if epoch % 80 == 0:
        #     draw(output, loss)
    return model, loss

def draw(output, loss):
    output = output.cpu()
    plt.cla()
    plt.scatter(x.numpy(), y.numpy())
    plt.plot(x.numpy(), output.detach().numpy(), 'r-', lw=5)
    plt.text(0.5, 0, 'loss=%.4f' % loss.item(), fontdict={'size': 20, 'color': 'red'})
    plt.pause(0.005)  # redraws without blocking, unlike a bare plt.show()

net = Net(input_feature=1, num_hidden=20, outputs=1).cuda()
inputs = x.cuda()
target = y.cuda()

optimizer = optim.SGD(net.parameters(), lr=0.01)
criterion = nn.MSELoss()
net, loss = train(net, criterion, optimizer, 10000)
print("final loss:", loss.item())