F.softmax

import torch
import torch.nn.functional as F
 
# Demonstrate F.softmax along each dimension of a 2-D tensor.
# For a (3, 4) tensor: dim=0 (equivalently dim=-2) normalizes down each
# column so every column sums to 1; dim=1 (equivalently dim=-1) normalizes
# across each row so every row sums to 1.
# NOTE: renamed `input` -> `x` to avoid shadowing the builtin input().
x = torch.randn(3, 4)
print(x)

b = F.softmax(x, dim=0)   # column-wise softmax: each column sums to 1
print('b', b)

c = F.softmax(x, dim=-2)  # dim=-2 == dim=0 for a 2-D tensor: column-wise, same as b
print('c', c)

d = F.softmax(x, dim=1)   # row-wise softmax: each row sums to 1
print('d', d)

e = F.softmax(x, dim=-1)  # dim=-1 == dim=1 for a 2-D tensor: row-wise, same as d
print('e', e)

tensor([[ 0.7782, -0.4178, -0.8316, -1.0575],
        [ 1.0279, -0.5515,  1.1533, -2.3556],
        [-1.4882,  0.6435, -1.7345, -1.7422]])
b tensor([[0.4189, 0.2099, 0.1152, 0.5627],
        [0.5377, 0.1836, 0.8382, 0.1536],
        [0.0434, 0.6065, 0.0467, 0.2837]])
c tensor([[0.4189, 0.2099, 0.1152, 0.5627],
        [0.5377, 0.1836, 0.8382, 0.1536],
        [0.0434, 0.6065, 0.0467, 0.2837]])
d tensor([[0.6017, 0.1820, 0.1203, 0.0960],
        [0.4213, 0.0868, 0.4776, 0.0143],
        [0.0910, 0.7672, 0.0712, 0.0706]])
e tensor([[0.6017, 0.1820, 0.1203, 0.0960],
        [0.4213, 0.0868, 0.4776, 0.0143],
        [0.0910, 0.7672, 0.0712, 0.0706]])

You may also be interested in: (F.softmax)