Common Usage and Purposes of torch.nn

Table of Contents

  • Common Usage and Purposes of torch.nn
    • torch.nn
    • BasicConv

Common Usage and Purposes of torch.nn

torch.nn

import torch
import torch.nn as nn

#class torch.nn.Conv2d(in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True)
#dilation (int or tuple, optional): spacing between kernel elements, default 1. Purpose: enlarge the receptive field without increasing the amount of computation
#groups (int, optional): number of blocked connections from input channels to output channels, default 1. Purpose: reduce the number of parameters; e.g. the weight of nn.Conv2d(6, 6, kernel_size=1, groups=2) has shape (6, 3, 1, 1) instead of (6, 6, 1, 1)
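#A quick shape check of the groups example (an added sketch; kernel_size=1 is
#assumed here to match the (6, 3, 1, 1) shape): a grouped convolution's weight
#has shape (out_channels, in_channels // groups, kH, kW)
def groups_demo():
    g = nn.Conv2d(6, 6, kernel_size=1, groups=2)
    print(g.weight.shape)   # torch.Size([6, 3, 1, 1])
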
def conv2d(input):
    cv = nn.Conv2d(16,32,3,stride=1,padding=0,dilation=3)
    h = cv(input)
    print(h.size())
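
#Helper showing the Conv2d output-size formula (an added illustration, not
#part of the original post):
#  H_out = floor((H_in + 2*padding - dilation*(kernel_size - 1) - 1) / stride) + 1
def conv2d_out_size(h_in, kernel_size, stride=1, padding=0, dilation=1):
    return (h_in + 2 * padding - dilation * (kernel_size - 1) - 1) // stride + 1

#For the demo above: conv2d_out_size(12, 3, dilation=3) == 6,
#so conv2d(input) prints torch.Size([1, 32, 6, 6])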


#class torch.nn.MaxPool2d(kernel_size, stride=None, padding=0, dilation=1, return_indices=False, ceil_mode=False)
#input(N,C,H,W) ===> maxpool ===> output(N,C,H_out,W_out)
def maxpool2d(input):
    mp = nn.MaxPool2d(kernel_size=3,stride=2,padding=0)
    h = mp(input)
    print(h.size())
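
#MaxPool2d follows the same size formula; with the helper above:
#conv2d_out_size(12, 3, stride=2) == 5, so maxpool2d(input) prints torch.Size([1, 16, 5, 5])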

#class torch.nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride=1, padding=0, output_padding=0, groups=1, bias=True, dilation=1)
#2D transposed convolution; it can be regarded as the gradient of Conv2d with respect to its input. Note: with stride=1, padding = (kernel_size - 1) / 2 keeps the output the same spatial size as the input (as in the demo below)
def convtranspose2d(input):
    td = nn.ConvTranspose2d(16,16,kernel_size=3,stride=1,padding=1)
    h = td(input)
    print(h.size())
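
#ConvTranspose2d output size (an added worked example):
#  H_out = (H_in - 1)*stride - 2*padding + dilation*(kernel_size - 1) + output_padding + 1
#For the demo above: (12 - 1)*1 - 2*1 + 1*2 + 0 + 1 = 12,
#so convtranspose2d(input) prints torch.Size([1, 16, 12, 12])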

#class torch.nn.BatchNorm2d(num_features, eps=1e-05, momentum=0.1, affine=True)
#num_features is the number of input channels; the remaining arguments can usually be left at their defaults
def BN(input):
    print("input:",input)
    bn = nn.BatchNorm2d(16,eps = 1e-05,affine=True)
    h = bn(input)
    print(h.size())
    print("BN(input):",h)

#class torch.nn.ReLU(inplace=False)
#applies the element-wise function ReLU(x) = max(0, x)
def relu(input):
    rl = nn.ReLU()
    h = rl(input)
    print(h)


if __name__ == "__main__":
    input = torch.randn(1,16,12,12)
    #test the convolution layer
    conv2d(input)
    #test the pooling layer
    maxpool2d(input)
    #test the transposed convolution
    convtranspose2d(input)
    #test batch normalization
    BN(input)
    
    input1  = torch.randn(2)
    print(input1)
    #test ReLU
    relu(input1)

BasicConv

#When building a network, one usually first defines a basic Conv-BN-Activation block, often abbreviated CBA (the main difference lies in A: different networks tend to use different activations)
class BasicConv(nn.Module):
    def __init__(self, input, output, k_s, stride=1):
        super(BasicConv, self).__init__()
        #use the k_s and stride arguments rather than hard-coded values;
        #bias=False is also common here, since BatchNorm2d's own bias makes a conv bias redundant
        self.conv = nn.Conv2d(input, output, kernel_size=k_s, stride=stride, bias=True)
        self.BN = nn.BatchNorm2d(output)
        self.activate = nn.ReLU()

    #forward pass
    def forward(self,x):
        x = self.conv(x)
        x = self.BN(x)
        x = self.activate(x)
        return x
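
A minimal usage sketch (shapes chosen for illustration, not from the original post): with k_s=3 and no padding, each spatial dimension shrinks by 2.

if __name__ == "__main__":
    block = BasicConv(16, 32, k_s=3)
    x = torch.randn(1, 16, 12, 12)
    print(block(x).size())   # torch.Size([1, 32, 10, 10])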
