第四章-使用nn.Module实现全连接层

import torch as t
from torch import nn
from torch.autograd import Variable as V

class Linear(nn.Module):  # custom fully-connected layer, inherits nn.Module
    """A minimal fully-connected (affine) layer: y = x @ w + b.

    Parameters registered via nn.Parameter are picked up automatically by
    nn.Module machinery (named_parameters, optimizers, .to(), etc.).
    """

    def __init__(self, in_features, out_features):
        print("调用构造函数")
        super().__init__()  # required so nn.Module can register parameters
        # Weight (in_features, out_features) and bias (out_features,),
        # both initialized from a standard normal distribution.
        self.w = nn.Parameter(t.randn(in_features, out_features))
        self.b = nn.Parameter(t.randn(out_features))

    def forward(self, x):
        """Compute x @ w + b for a batch x of shape (N, in_features)."""
        print("调用forward方法")
        x = x.mm(self.w)  # matrix multiply: (N, in) @ (in, out) -> (N, out)
        # Broadcasting adds b row-wise; the original expand_as was redundant.
        return x + self.b

# Build a Linear instance and run a forward pass.
layer = Linear(4, 3)
# NOTE: torch.autograd.Variable is deprecated since PyTorch 0.4 —
# plain tensors track gradients, so no wrapper is needed.
x = t.randn(2, 4)
# layer(x) goes through Module.__call__, which runs registered hooks and
# then forward(); prefer layer(x) over calling layer.forward(x) directly.
output = layer(x)
print(output)

# Parameters registered with nn.Parameter appear in named_parameters().
for name, parameter in layer.named_parameters():
    print(name, parameter)

输出结果:

调用构造函数
调用forward方法
tensor([[-1.9584e+00,  3.3580e+00,  1.7390e-01],
        [-1.4562e+00, -1.5603e-01, -7.6652e-05]], grad_fn=<AddBackward0>)
w Parameter containing:
tensor([[-0.3131, -1.9254, -0.1346],
        [-0.1360, -0.3300,  0.2781],
        [-1.5389,  0.0248, -0.8641],
        [ 0.0791, -0.5607, -0.8640]], requires_grad=True)
b Parameter containing:
tensor([-1.3734,  0.6565,  0.3848], requires_grad=True)

你可能感兴趣的:(PyTorch)