Activation Function Plotting Code

1. ELU

import torch
import torch.nn as nn
x = torch.randn(100, 10)
# torch.nn.ELU(alpha=1.0, inplace=False)
func = nn.ELU()
y = func(x)
print(x[0])
print(y[0])
# tensor([-0.6763,  0.6669, -2.0544, -1.8889, -0.3581,  0.0884, -1.3734,  0.9181,0.4205,  0.1281])
# tensor([-0.4915,  0.6669, -0.8718, -0.8488, -0.3010,  0.0884, -0.7467,  0.9181,0.4205,  0.1281])
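
As a spot check on the first element: with the default alpha = 1, ELU(-0.6763) = e^(-0.6763) - 1 ≈ -0.4915, while positive entries such as 0.6669 pass through unchanged, exactly as the two printed tensors show.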
 
 
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
 
# A thin wrapper around the built-in functional API:
class ELU(nn.Module):
    def __init__(self):
        super().__init__()

    def forward(self, x):
        return F.elu(x)

# A manual implementation of ELU(x) = x for x > 0 and alpha*(exp(x)-1) for x <= 0,
# built out of ReLU so that each branch is zero outside its own region.
# Note: this definition shadows the wrapper above, so the plot below uses this one.
class ELU(nn.Module):
    def __init__(self, alpha=1.0):
        super(ELU, self).__init__()
        self.alpha = alpha

    def forward(self, x):
        temp1 = F.relu(x)                                 # x on x > 0, else 0
        temp2 = self.alpha * (-1*F.relu(1-torch.exp(x)))  # alpha*(exp(x)-1) on x <= 0, else 0
        return temp1 + temp2
x = torch.linspace(-10, 10, 1000).unsqueeze(0)
 
func = nn.ELU()
func2 = ELU()
 
y1 = func(x)
y2 = func2(x)
 
plt.plot(x.detach().numpy()[0],y1.detach().numpy()[0],label="PyTorch")
plt.plot(x.detach().numpy()[0],y2.detach().numpy()[0],label="myELU")
plt.legend()
plt.grid()
plt.show()
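
As a quick sanity check (a minimal sketch, reusing x, y1 and y2 from the script above), the hand-rolled ELU should agree with nn.ELU up to floating-point error:

print(torch.allclose(y1, y2, atol=1e-6))  # True
print(torch.max(torch.abs(y1 - y2)))      # on the order of 1e-7 or smaller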

2. CELU

import torch
import torch.nn as nn
x = torch.randn(100, 10)
# torch.nn.CELU(alpha=1.0, inplace=False)
func = nn.CELU()
y = func(x)
print(x[0])
print(y[0])
# tensor([-1.1315, -0.3425,  0.7596, -1.8625, -0.8638,  0.2654,  0.9773, -0.5946, 0.1348, -0.7941])
# tensor([-0.6775, -0.2900,  0.7596, -0.8447, -0.5785,  0.2654,  0.9773, -0.4482, 0.1348, -0.5480])
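
Spot-checking the first element with the default alpha = 1: CELU(-1.1315) = e^(-1.1315) - 1 ≈ -0.6775, and positive entries such as 0.7596 pass through unchanged, matching the printed output.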
 
 
import torch
import torch.nn as nn
import torch.nn.functional as F
from matplotlib import pyplot as plt
class CELU(nn.Module):
    def __init__(self, alpha=1.0):
        super(CELU, self).__init__()
        self.alpha = alpha

    def forward(self, x):
        temp1 = F.relu(x)                                             # x on x > 0, else 0
        # Equivalent formulation via F.elu:
        # temp2 = self.alpha * (F.elu(-1*F.relu(-1.*x/self.alpha)))
        temp2 = self.alpha * (-1*F.relu(1-torch.exp(x/self.alpha)))   # alpha*(exp(x/alpha)-1) on x <= 0, else 0
        return temp1 + temp2
 
x = torch.linspace(-10, 10, 1000).unsqueeze(0)
 
func = nn.CELU()
func2 = CELU()
 
y1 = func(x)
y2 = func2(x)
 
plt.plot(x.detach().numpy()[0],y1.detach().numpy()[0],label="PyTorch")
plt.plot(x.detach().numpy()[0],y2.detach().numpy()[0],label="myCELU")
plt.legend()
plt.grid()
plt.show()
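
One property worth noting: CELU(x) = max(0, x) + min(0, alpha*(exp(x/alpha) - 1)), and with the default alpha = 1 this reduces exactly to ELU. A one-line check (reusing x from the script above):

print(torch.allclose(nn.CELU()(x), nn.ELU()(x)))  # True: CELU with alpha=1 is ELU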

3. SELU

import torch
import torch.nn as nn
import torch.nn.functional as F
from matplotlib import pyplot as plt
class SELU(nn.Module):
    def __init__(self):
        super(SELU, self).__init__()
        # Fixed constants from the SELU paper (Klambauer et al., 2017)
        self.alpha = 1.6732632423543772848170429916717
        self.scale = 1.0507009873554804934193349852946

    def forward(self, x):
        temp1 = self.scale * F.relu(x)                                  # scale*x on x > 0, else 0
        # Equivalent formulation via F.elu:
        # temp2 = self.scale * self.alpha * (F.elu(-1*F.relu(-1*x)))
        temp2 = self.scale * self.alpha * (-1*F.relu(1-torch.exp(x)))   # scale*alpha*(exp(x)-1) on x <= 0, else 0
        return temp1 + temp2
 
x = torch.linspace(-10, 10, 1000).unsqueeze(0)
 
func2 = nn.SELU()
func = SELU()
 
y1 = func(x)
y2 = func2(x)
 
plt.plot(x.detach().numpy()[0],y1.detach().numpy()[0],label="mySELU")
plt.plot(x.detach().numpy()[0],y2.detach().numpy()[0],label="PyTorch")
plt.legend()
plt.grid()
plt.show()
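
The fixed alpha and scale constants are what make SELU "self-normalizing". A minimal sketch of that property (assuming standard-normal inputs and fan-in-scaled random weights, which is what the SELU derivation presumes): repeatedly applying a random linear layer followed by SELU keeps the activation statistics near zero mean and unit variance.

h = torch.randn(1024, 256)
for _ in range(20):
    w = torch.randn(256, 256) / 256 ** 0.5  # fan-in-scaled init
    h = torch.selu(h @ w)
print(h.mean().item(), h.std().item())      # both should stay close to 0 and 1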

4. GELU

import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt
import numpy as np

# A thin wrapper around the built-in functional API:
class GELU(nn.Module):
    def __init__(self):
        super(GELU, self).__init__()

    def forward(self, x):
        return F.gelu(x)

# A manual implementation of the tanh approximation
# GELU(x) ≈ 0.5*x*(1 + tanh(sqrt(2/pi)*(x + 0.044715*x^3))),
# with tanh written out through exponentials.
# Note: this definition shadows the wrapper above, so the plot below uses this one.
class GELU(nn.Module):
    def __init__(self):
        super(GELU, self).__init__()

    def forward(self, x):
        x_ = np.sqrt(2./np.pi) * (x + 0.044715 * x * x * x)
        return 0.5 * x * (1 + (torch.exp(x_) - torch.exp(-x_))/(torch.exp(x_) + torch.exp(-x_)))
 
x = torch.linspace(-10, 10, 1000).unsqueeze(0)
# torch.nn.GELU()
func2 = nn.GELU()
func = GELU()
 
y1 = func(x)
y2 = func2(x)
 
plt.plot(x.detach().numpy()[0],y1.detach().numpy()[0],label="myGELU")
plt.plot(x.detach().numpy()[0],y2.detach().numpy()[0],label="PyTorch")
plt.legend()
plt.grid()
plt.show()
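
Note that nn.GELU defaults to the exact formulation 0.5*x*(1 + erf(x/sqrt(2))), while the class above implements the tanh approximation, so the two curves agree only up to a small approximation error. A quick way to measure the gap (a minimal sketch, reusing x and y1 from the script above):

import math
exact = 0.5 * x * (1 + torch.erf(x / math.sqrt(2.0)))  # exact, erf-based GELU
print(torch.max(torch.abs(exact - y1)))                # worst-case error of the tanh form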

5. NumPy implementations and gradient plots

import matplotlib.pyplot as plt
import numpy as np

class ActivateFunc():
    # Each method returns [y, y_grad]: the activation and its derivative,
    # both evaluated elementwise on self.x.
    def __init__(self, x, b=None, lamb=None, alpha=None, a=None):
        self.x = x
        self.b = b
        self.lamb = lamb
        self.alpha = alpha
        self.a = a

    def Sigmoid(self):
        y = np.exp(self.x) / (np.exp(self.x) + 1)
        y_grad = y*(1-y)
        return [y, y_grad]

    def Tanh(self):
        y = np.tanh(self.x)
        y_grad = 1 - y * y
        return [y, y_grad]

    def Swish(self):  # b is a constant; set b before calling
        y = self.x * (np.exp(self.b*self.x) / (np.exp(self.b*self.x) + 1))
        y_grad = np.exp(self.b*self.x)/(1+np.exp(self.b*self.x)) + self.x * (self.b*np.exp(self.b*self.x) / ((1+np.exp(self.b*self.x))*(1+np.exp(self.b*self.x))))
        return [y, y_grad]

    def ELU(self):  # alpha is a constant; set alpha before calling
        y = np.where(self.x > 0, self.x, self.alpha * (np.exp(self.x) - 1))
        y_grad = np.where(self.x > 0, 1, self.alpha * np.exp(self.x))
        return [y, y_grad]

    def SELU(self):  # lamb > 1; set lamb and alpha before calling
        y = np.where(self.x > 0, self.lamb * self.x, self.lamb * self.alpha * (np.exp(self.x) - 1))
        y_grad = np.where(self.x > 0, self.lamb*1, self.lamb * self.alpha * np.exp(self.x))
        return [y, y_grad]

    def ReLU(self):
        y = np.where(self.x < 0, 0, self.x)
        y_grad = np.where(self.x < 0, 0, 1)
        return [y, y_grad]

    def PReLU(self):  # a > 1; set a before calling. The negative-side slope is
                      # 1/a; in real PReLU this slope is learned, not fixed.
        y = np.where(self.x < 0, self.x / self.a, self.x)
        y_grad = np.where(self.x < 0, 1 / self.a, 1)
        return [y, y_grad]

    def LeakyReLU(self):  # a > 1; set a before calling. Same form as PReLU
                          # above, but with a fixed, hand-picked slope.
        y = np.where(self.x < 0, self.x / self.a, self.x)
        y_grad = np.where(self.x < 0, 1 / self.a, 1)
        return [y, y_grad]

    def Mish(self):
        f = 1 + np.exp(self.x)            # f = 1 + e^x, so tanh(softplus(x)) = (f*f-1)/(f*f+1)
        y = self.x * ((f*f-1) / (f*f+1))  # Mish(x) = x * tanh(softplus(x))
        y_grad = (f*f-1) / (f*f+1) + self.x*(4*f*(f-1)) / ((f*f+1)*(f*f+1))
        return [y, y_grad]

    def ReLU6(self):
        y = np.where(np.where(self.x < 0, 0, self.x) > 6, 6, np.where(self.x < 0, 0, self.x))
        y_grad = np.where(self.x > 6, 0, np.where(self.x < 0, 0, 1))
        return [y, y_grad]

    def Hard_Swish(self):
        f = self.x + 3
        relu6 = np.where(np.where(f < 0, 0, f) > 6, 6, np.where(f < 0, 0, f))
        relu6_grad = np.where(f > 6, 0, np.where(f < 0, 0, 1))
        y = self.x * relu6 / 6
        y_grad = relu6 / 6 + self.x * relu6_grad / 6
        return [y, y_grad]

    def Hard_Sigmoid(self):
        f = (2 * self.x + 5) / 10
        y = np.where(np.where(f > 1, 1, f) < 0, 0, np.where(f > 1, 1, f))
        y_grad = np.where(f > 0, np.where(f >= 1, 0, 1 / 5), 0)
        return [y, y_grad]
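
    # A minimal added sanity check (a hypothetical helper, not part of the
    # original class): compare the analytic y_grad of any method above against
    # a finite-difference derivative from np.gradient. The gap should be small
    # everywhere except at kink points (e.g. x = 0 for ReLU). Example:
    #   ActivateFunc(np.arange(-10, 10, 0.01), b=1).CheckGrad('Swish')
    def CheckGrad(self, name):
        y, y_grad = getattr(self, name)()
        numeric = np.gradient(y, self.x)
        print(name, np.max(np.abs(numeric - y_grad)))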


def PlotActiFunc(x, y, title):
    plt.grid(which='minor', alpha=0.2)
    plt.grid(which='major', alpha=0.5)
    plt.plot(x, y)
    plt.title(title)
    plt.show()

def PlotMultiFunc(x, y):
    plt.grid(which='minor', alpha=0.2)
    plt.grid(which='major', alpha=0.5)
    plt.plot(x, y)


if __name__ == '__main__':
    x = np.arange(-10, 10, 0.01)
    activateFunc = ActivateFunc(x)
    activateFunc.b = 1
    PlotActiFunc(x, activateFunc.Sigmoid()[0], title='Sigmoid')
    PlotActiFunc(x, activateFunc.Swish()[0], title='Swish')
    PlotActiFunc(x, activateFunc.ReLU()[0], title='ReLU')
    PlotActiFunc(x, activateFunc.Mish()[0], title='Mish')
    PlotActiFunc(x, activateFunc.Mish()[1], title='Mish-grad')
    PlotActiFunc(x, activateFunc.Swish()[1], title='Swish-grad')

    plt.figure(1)
    PlotMultiFunc(x, activateFunc.Mish()[1])
    PlotMultiFunc(x, activateFunc.Swish()[1])
    plt.legend(['Mish-grad', 'Swish-grad'])
    plt.figure(2)

    PlotMultiFunc(x, activateFunc.Swish()[0])
    PlotMultiFunc(x, activateFunc.Mish()[0])
    plt.legend(['Swish', 'Mish'])

    plt.figure(3)
    PlotMultiFunc(x, activateFunc.Swish()[0])
    PlotMultiFunc(x, activateFunc.Hard_Swish()[0])
    plt.legend(['Swish', 'Hard-Swish'])

    plt.figure(4)
    PlotMultiFunc(x, activateFunc.Sigmoid()[0])
    PlotMultiFunc(x, activateFunc.Hard_Sigmoid()[0])
    plt.legend(['Sigmoid', 'Hard-Sigmoid'])

    plt.figure(5)
    PlotMultiFunc(x, activateFunc.ReLU()[0])
    PlotMultiFunc(x, activateFunc.ReLU6()[0])
    plt.legend(['ReLU', 'ReLU6'])

    plt.figure(6)
    PlotMultiFunc(x, activateFunc.Swish()[1])
    PlotMultiFunc(x, activateFunc.Hard_Swish()[1])
    plt.legend(['Swish-grad', 'Hard-Swish-grad'])

    plt.figure(7)
    PlotMultiFunc(x, activateFunc.Sigmoid()[1])
    PlotMultiFunc(x, activateFunc.Hard_Sigmoid()[1])
    plt.legend(['Sigmoid-grad', 'Hard-Sigmoid-grad'])

    plt.figure(8)
    PlotMultiFunc(x, activateFunc.ReLU()[1])
    PlotMultiFunc(x, activateFunc.ReLU6()[1])
    plt.legend(['ReLU-grad', 'ReLU6-grad'])

    plt.show()
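
The ELU, SELU, PReLU and LeakyReLU members are never plotted above because their constants (alpha, lamb, a) are left unset. A possible extension, appended to the __main__ block, with constants chosen here purely for illustration:

    activateFunc.alpha = 1.0    # ELU / SELU alpha
    activateFunc.lamb = 1.0507  # SELU scale (the paper's value, rounded)
    activateFunc.a = 10         # negative-side slope is 1/a for PReLU / LeakyReLU
    PlotActiFunc(x, activateFunc.ELU()[0], title='ELU')
    PlotActiFunc(x, activateFunc.SELU()[0], title='SELU')
    PlotActiFunc(x, activateFunc.LeakyReLU()[0], title='LeakyReLU')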
