Step 1: net = LeNet(classes=2)
Step 2: LeNet class, __init__(), calls super(LeNet, self).__init__()
Step 3: Module class, __init__(), calls self._construct()
Step 4: Module class, _construct(), builds the _modules ordered dictionary
Step 5: Conv2d class, __init__(), calls super(Conv2d, self).__init__()
Step 6: _ConvNd class, __init__(), calls super(_ConvNd, self).__init__()
Step 7: Module class, __init__(), calls self._construct()
Step 8: Module class, _construct()
Step 9: Module class, __setattr__(), assigns the layer into _modules (see the sketch after this list)
Step 10: construction of all the network layers finishes
Step 11: net is returned
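To make the trace above concrete, here is a heavily simplified sketch of the mechanism, using toy classes rather than PyTorch's actual nn.Module / _ConvNd source: attribute assignment is intercepted by __setattr__, and any sub-module being assigned is recorded in the _modules ordered dictionary prepared by _construct(). The MiniLeNet layer sizes are only illustrative.

from collections import OrderedDict

class MiniModule:
    """Toy stand-in for nn.Module; mirrors the trace above, not the real source."""
    def __init__(self):
        self._construct()                       # steps 3 / 7

    def _construct(self):
        # steps 4 / 8: prepare the ordered container for sub-modules
        object.__setattr__(self, '_modules', OrderedDict())

    def __setattr__(self, name, value):
        # step 9: assigning a sub-module is intercepted and recorded in _modules
        if isinstance(value, MiniModule):
            self._modules[name] = value
        else:
            object.__setattr__(self, name, value)

class MiniConv2d(MiniModule):
    def __init__(self, in_channels, out_channels, kernel_size):
        super().__init__()                      # steps 5-8 happen for each layer
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size

class MiniLeNet(MiniModule):
    def __init__(self, classes=2):
        super().__init__()                      # steps 2-4
        self.conv1 = MiniConv2d(3, 6, 5)        # step 9 fires here
        self.conv2 = MiniConv2d(6, 16, 5)

net = MiniLeNet(classes=2)                      # step 1 ... step 11
print(list(net._modules.keys()))                # ['conv1', 'conv2']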
The rewritten code (using an OrderedDict so that each layer in nn.Sequential gets a readable name) is as follows:
import torch
import torch.nn as nn
from collections import OrderedDict


class AlexNet(nn.Module):
    def __init__(self, num_classes=1000):
        super(AlexNet, self).__init__()
        # Passing an OrderedDict to nn.Sequential names each layer instead of
        # using the default numeric index (a dict literal preserves insertion
        # order on Python 3.7+, so the layer order is kept).
        self.features = nn.Sequential(OrderedDict({
            'conv1': nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
            'relu1': nn.ReLU(inplace=True),
            'pool1': nn.MaxPool2d(kernel_size=3, stride=2),
            'conv2': nn.Conv2d(64, 192, kernel_size=5, padding=2),
            'relu2': nn.ReLU(inplace=True),
            'pool2': nn.MaxPool2d(kernel_size=3, stride=2),
            'conv3': nn.Conv2d(192, 384, kernel_size=3, padding=1),
            'relu3': nn.ReLU(inplace=True),
            'conv4': nn.Conv2d(384, 256, kernel_size=3, padding=1),
            'relu4': nn.ReLU(inplace=True),
            'conv5': nn.Conv2d(256, 256, kernel_size=3, padding=1),
            'relu5': nn.ReLU(inplace=True),
            'pool3': nn.MaxPool2d(kernel_size=3, stride=2),
        }))
        self.avgpool = nn.AdaptiveAvgPool2d((6, 6))
        self.classifier = nn.Sequential(OrderedDict({
            'drop1': nn.Dropout(),
            'fc1': nn.Linear(256 * 6 * 6, 4096),
            'relu6': nn.ReLU(inplace=True),
            'drop2': nn.Dropout(),
            'fc2': nn.Linear(4096, 4096),
            'relu7': nn.ReLU(inplace=True),
            'fc3': nn.Linear(4096, num_classes),
        }))

    def forward(self, x):
        x = self.features(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)   # flatten all dims except the batch dim
        x = self.classifier(x)
        return x
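As a quick check (a minimal usage sketch, assuming the AlexNet class above is defined in the current session), the OrderedDict keys now appear as attribute names on the Sequential containers, and a dummy forward pass runs end to end:

net = AlexNet(num_classes=1000)
print(net.features.conv1)             # the layer is reachable by its given name
x = torch.randn(1, 3, 224, 224)       # dummy input batch
print(net(x).shape)                   # torch.Size([1, 1000])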