pytorch学习笔记-神经网络(非线性激活)

以 ReLU 为例(注意:下面的示例代码中同时定义了 ReLU 和 Sigmoid,但 forward 里实际调用的是 Sigmoid;ReLU 层仅定义未使用)

import torch
import torchvision.transforms
from torch import nn
from torch.nn import ReLU, Sigmoid
from torch.utils.data import DataLoader
from torch.utils.tensorboard import SummaryWriter

# Demo input: a 2x2 tensor with positive and negative values, to show
# how a non-linear activation maps them.
# NOTE(review): the name `input` shadows the `input` builtin; kept as-is
# because the rest of the tutorial uses it.
input = torch.tensor([[1, -0.5],
                      [-1, 3]])

# Reshape to (N, C, H, W) = (1, 1, 2, 2) as nn layers expect batched input.
# Fixes the original typo `intput=torch.reshape=(input,(-1,1,2,2))`, which
# rebound `torch.reshape` to a tuple instead of calling the function.
input = torch.reshape(input, (-1, 1, 2, 2))

# CIFAR-10 test split, downloaded into ../data on first run; ToTensor
# converts each PIL image to a float tensor in [0, 1] with shape (C, H, W).
dataset=torchvision.datasets.CIFAR10("../data",train=False,download=True,
                                     transform=torchvision.transforms.ToTensor())
# Iterate the dataset in batches of 64 for the logging loop below.
dataloader=DataLoader(dataset,batch_size=64)

class KELE(nn.Module):
    """Minimal demo network applying an element-wise non-linearity.

    Both a ReLU and a Sigmoid module are constructed, but forward()
    applies only the Sigmoid; the ReLU is kept around for experimenting.
    """

    def __init__(self):
        super().__init__()
        self.relu1 = ReLU()
        self.sigmoid1 = Sigmoid()

    def forward(self, input):
        # Element-wise sigmoid; the input's shape is preserved.
        return self.sigmoid1(input)
kele = KELE()

# Log each batch before and after the activation to TensorBoard so the
# effect of the non-linearity can be compared visually.
writer = SummaryWriter("../logs_relu")
for step, (imgs, targets) in enumerate(dataloader):
    writer.add_images("input", imgs, global_step=step)
    writer.add_images("output", kele(imgs), global_step=step)
writer.close()

你可能感兴趣的:(pytorch,神经网络,深度学习)