torch.nn.functional.normalize — rescales entries so the Lp norm along `dim` equals 1 (p=1: absolute values along `dim` sum to 1; p=2: unit Euclidean norm along `dim`)
a1=torch.nn.functional.normalize(a,p=1, dim=1)
a1
Out[283]:
tensor([[[ 0.2748, -0.9671, 0.3415, -0.1457],
[-0.2564, 0.0282, -0.5989, 0.6267],
[-0.4687, -0.0047, -0.0596, -0.2275]],
[[ 0.0069, 0.3461, 0.3227, 0.3084],
[-0.5909, -0.5238, -0.1853, -0.2525],
[-0.4022, 0.1301, -0.4920, 0.4391]]])
a1=torch.nn.functional.normalize(a,p=2, dim=0)
a1
Out[285]:
tensor([[[ 0.9993, -0.8100, 0.6330, -0.7194],
[-0.2815, 0.0266, -0.9283, 0.9835],
[-0.6189, -0.0179, -0.0932, -0.7505]],
[[ 0.0371, 0.5865, 0.7741, 0.6946],
[-0.9596, -0.9996, -0.3718, -0.1808],
[-0.7855, 0.9998, -0.9956, 0.6609]]])
softmax — torch.nn.functional.softmax maps the entries along `dim` to nonnegative values that sum to 1 along that dimension
b=torch.randn((2,3,4))
b
Out[345]:
tensor([[[ 0.9075, 1.3613, -0.9740, -1.2543],
[ 0.4870, -0.0040, -1.5290, -0.1971],
[ 0.3198, 2.0391, -0.6837, 0.3684]],
[[ 0.2504, -0.1802, -0.9951, 0.6733],
[-1.0617, -1.4770, -1.3032, 0.0329],
[ 1.1119, -0.5354, 0.6401, -0.1070]]])
b1=torch.nn.functional.softmax(b, dim=0)
b1
Out[347]:
tensor([[[0.6586, 0.8237, 0.5053, 0.1270],
[0.8247, 0.8135, 0.4438, 0.4427],
[0.3117, 0.9292, 0.2102, 0.6167]],
[[0.3414, 0.1763, 0.4947, 0.8730],
[0.1753, 0.1865, 0.5562, 0.5573],
[0.6883, 0.0708, 0.7898, 0.3833]]])
0.6586+0.3414  # the two entries paired along dim=0 sum to 1, as softmax guarantees
Out[351]: 1.0
a  # NOTE: `a` is no longer the random tensor from the normalize examples — it was apparently redefined (definition not shown in this excerpt) as the 1..12 tensor below
Out[354]:
tensor([[[ 1., 2., 3.],
[ 4., 5., 6.]],
[[ 7., 8., 9.],
[10., 11., 12.]]])
a1=torch.nn.functional.softmax(a, dim=0)
a1
Out[356]:
tensor([[[0.0025, 0.0025, 0.0025],
[0.0025, 0.0025, 0.0025]],
[[0.9975, 0.9975, 0.9975],
[0.9975, 0.9975, 0.9975]]])
a1= torch.nn.functional.softmax(a, dim=1)
a1
Out[337]:
tensor([[[0.0474, 0.0474, 0.0474],
[0.9526, 0.9526, 0.9526]],
[[0.0474, 0.0474, 0.0474],
[0.9526, 0.9526, 0.9526]]])
0.9526+0.0474  # the two entries paired along dim=1 sum to 1, as softmax guarantees
Out[338]: 1.0
a1= torch.nn.functional.softmax(a, dim=2)
a1
Out[333]:
tensor([[[0.0900, 0.2447, 0.6652],
[0.0900, 0.2447, 0.6652]],
[[0.0900, 0.2447, 0.6652],
[0.0900, 0.2447, 0.6652]]])