PyTorch Swish() Activation Function
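Swish multiplies the input by its own sigmoid: Swish(x) = x * sigmoid(x) (the beta = 1 form, also known as SiLU). The script below defines it as a small custom nn.Module and uses it as the activation in a fully connected regression network; on PyTorch 1.7 and later, the built-in nn.SiLU computes the same function.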

import torch
import torch.nn as nn
import torch.optim as optim
# Simple fully connected network: 2 -> 20 -> 20 -> 1 with Swish activations
class Net(nn.Module):
	def __init__(self):
		super(Net, self).__init__()
		self.main = nn.Sequential(
			nn.Linear(2, 20),
			Swish(),
			nn.Linear(20, 20),
			Swish(),
			nn.Linear(20, 1)
		)
	def forward(self, x):
		output = self.main(x)
		return output
class Swish(nn.Module):
	def __init__(self, inplace=False):
		super(Swish, self).__init__()
		# The in-place variant overwrites x, which can break autograd when the
		# original value is needed in the backward pass, so it is off by default.
		self.inplace = inplace
	def forward(self, x):
		# Swish(x) = x * sigmoid(x)
		if self.inplace:
			x.mul_(torch.sigmoid(x))
			return x
		else:
			return x * torch.sigmoid(x)
# Weight initialization function
def init_normal(m):
	if type(m) == nn.Linear:
		nn.init.kaiming_normal_(m.weight)
if __name__ == "__main__":
	device = torch.device("cpu")
	net=Net().to(device)
	net.apply(init_normal)
	optimizer=optim.Adam(net.parameters(),lr=1e-3)
	
	input_data = xx   # input data (placeholder)
	label_data = xx   # label data (placeholder)
	epochs = xx       # number of training epochs (placeholder)

	# Convert to tensors once, outside the training loop
	input_data = torch.FloatTensor(input_data).to(device)
	label_data = torch.FloatTensor(label_data).to(device)
	input_data.requires_grad = True   # only needed if gradients w.r.t. the input are required

	loss_function = nn.MSELoss()
	for epoch in range(epochs):
		net.zero_grad()
		out = net(input_data)
		loss = loss_function(out, label_data)
		loss.backward()
		optimizer.step()
		if epoch % 100 == 0:
			torch.save(net.state_dict(), "D://test.pt")
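
	# Quick sanity check (not in the original post; assumes the Swish class above):
	# the custom module should match the closed-form x * sigmoid(x), and on
	# PyTorch >= 1.7 it should also match the built-in nn.SiLU.
	x = torch.randn(4, 2)
	swish = Swish(inplace=False)                   # out-of-place, so x is left untouched
	expected = x * torch.sigmoid(x)                # Swish(x) = x * sigmoid(x), beta = 1
	print(torch.allclose(swish(x), expected))      # True
	print(torch.allclose(nn.SiLU()(x), expected))  # True on PyTorch >= 1.7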
