Softmax loss function and its derivative, implemented in Python

Loss function:
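For reference (the notation here is introduced for clarity; it is not in the original post), with raw scores x of shape (N, C), softmax probabilities p, one-hot labels y, and y_i the true class index of sample i, the two functions below compute:

p_{ij} = \frac{e^{x_{ij}}}{\sum_{k} e^{x_{ik}}}, \qquad
L = -\frac{1}{N} \sum_{i=1}^{N} \log p_{i,\,y_i}, \qquad
\frac{\partial L}{\partial x} = \frac{1}{N}\,(p - y)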

import numpy as np

# Softmax cross-entropy loss.
# inputs: (N, C) raw scores; label: (N, C) one-hot targets.
def softmaxwithloss(inputs, label):
    # Shift scores by the row-wise max so np.exp cannot overflow.
    shifted = inputs - np.max(inputs, axis=1, keepdims=True)
    exp_scores = np.exp(shifted)
    probability = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
    # Probability assigned to the true class of each sample.
    true_class = np.argmax(label, axis=1)
    true_probs = probability[np.arange(label.shape[0]), true_class]
    loss = -np.mean(np.log(true_probs))
    return loss
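A quick sanity check (a minimal sketch; the sample arrays are made up for illustration): a confident correct prediction should give a near-zero loss, while all-equal scores over 3 classes should give exactly ln(3):

label = np.array([[1.0, 0.0, 0.0],
                  [0.0, 1.0, 0.0]])
# Confident, correct raw scores -> loss near 0.
confident = np.array([[10.0, 0.0, 0.0],
                      [0.0, 10.0, 0.0]])
# All-equal raw scores -> uniform probabilities -> loss = ln(3).
uniform = np.zeros((2, 3))
print(softmaxwithloss(confident, label))  # ~0.0001
print(softmaxwithloss(uniform, label))    # ~1.0986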

# Gradient of the mean loss above with respect to inputs:
# dL/dinputs = (probability - label) / N.
def der_softmaxwithloss(inputs, label):
    shifted = inputs - np.max(inputs, axis=1, keepdims=True)
    exp_scores = np.exp(shifted)
    probability = exp_scores / np.sum(exp_scores, axis=1, keepdims=True)
    # Divide by the batch size to stay consistent with np.mean in the loss.
    gradient = (probability - label) / label.shape[0]
    return gradient
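Because the analytic gradient is easy to get wrong, it is worth checking it against a central finite difference of the loss (a minimal sketch; the shapes, seed, and eps below are arbitrary choices):

np.random.seed(0)
N, C = 4, 3
inputs = np.random.randn(N, C)
label = np.eye(C)[np.random.randint(0, C, size=N)]  # random one-hot targets

analytic = der_softmaxwithloss(inputs, label)

# Central finite differences of the scalar loss, one coordinate at a time.
eps = 1e-6
numeric = np.zeros_like(inputs)
for i in range(N):
    for j in range(C):
        plus, minus = inputs.copy(), inputs.copy()
        plus[i, j] += eps
        minus[i, j] -= eps
        numeric[i, j] = (softmaxwithloss(plus, label)
                         - softmaxwithloss(minus, label)) / (2 * eps)

print(np.max(np.abs(analytic - numeric)))  # should be tiny, e.g. ~1e-9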

 
