Implementing Cross-Entropy Loss by Hand

1. Definitions
Binary cross-entropy: loss = -(1/n) Σ [y ln a + (1 - y) ln(1 - a)], where y is the true label (0 or 1), a is the predicted probability of the positive class, and n is the number of samples.
Multi-class cross-entropy: loss = -(1/n) Σ Σ_k y_k ln a_k, where y_k is the one-hot label for class k and a_k is the predicted probability (softmax output) for class k.
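As a quick sanity check (a made-up single-sample example, not from the original post): for one sample with true label y = 1 and predicted probability a = 0.8, the binary cross-entropy is -[1·ln 0.8 + 0·ln 0.2] = -ln 0.8 ≈ 0.223; if the prediction were a = 0.2 instead, the loss would be -ln 0.2 ≈ 1.609, showing how confidently wrong predictions are penalized much more heavily.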

2. Implementation

import torch
import torch.nn.functional as F

# Build a toy dataset for the binary case:
# y holds 0/1 labels, p holds predicted probabilities in (0, 1)
y = torch.randint(0, 2, size=(10, 1)).to(torch.float)
p = torch.rand(size=(10, 1), requires_grad=True)
print(y)
print(p)

def cross_entry(p, y):
    # Hand-written binary cross-entropy, averaged over the batch
    res = -1 * torch.sum(y * torch.log(p) + (1 - y) * torch.log(1 - p)) / y.shape[0]
    return res

print(cross_entry(p, y))
print(F.binary_cross_entropy(p, y))    # built-in binary cross-entropy, should match

def mul_cross_entry(p, y):
    # Hand-written multi-class cross-entropy: softmax the logits,
    # one-hot encode the labels, and average the negative log-likelihood.
    # The 1e-6 epsilon guards against log(0), so the result can differ
    # very slightly from F.cross_entropy.
    p = F.softmax(p, dim=1)
    res = -1 * torch.sum(F.one_hot(y, num_classes=p.shape[1]) * torch.log(p + 1e-6)) / y.shape[0]
    return res

# Toy dataset for the multi-class case: integer class labels and raw logits
y = torch.randint(0, 3, size=(10,), dtype=torch.int64)
p = torch.randn(10, 3)
print(mul_cross_entry(p, y))           # hand-written multi-class cross-entropy
print(F.cross_entropy(p, y))           # built-in version, takes raw logits directly
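In practice the softmax-then-log step is usually fused for numerical stability, which also removes the need for the epsilon. Below is a minimal sketch of that variant (my addition, not part of the original post), reusing the multi-class p and y from above; the helper name mul_cross_entry_stable is hypothetical.

def mul_cross_entry_stable(p, y):
    # log_softmax computes log(softmax(p)) in one numerically stable step;
    # gather picks out the log-probability of each sample's true class
    log_probs = F.log_softmax(p, dim=1)
    return -log_probs.gather(1, y.unsqueeze(1)).mean()

print(mul_cross_entry_stable(p, y))    # should match F.cross_entropy(p, y)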
