Usage of tf.nn.softmax_cross_entropy_with_logits_v2()
tf.nn.softmax_cross_entropy_with_logits_v2(_sentinel, labels, logits, dim, name)
Computes the cross entropy between softmax(logits) and labels.
Arguments: _sentinel --> internal; not normally used.
labels --> the ground-truth class labels, e.g. one-hot probability distributions with the same shape as logits
logits --> the unscaled class scores predicted by the last layer of the network
dim --> the class dimension. Defaults to -1, the last dimension.
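To make the expected shapes concrete, here is a minimal sketch (hypothetical tensors, not from the original post): both labels and logits are [batch_size, num_classes], and the op returns one cross-entropy value per row.

import tensorflow as tf

# Sketch with hypothetical values: labels are per-row probability distributions (one-hot here)
logits_example = tf.constant([[2.0, 1.0, 0.1]])   # unscaled scores, shape [1, 3]
labels_example = tf.constant([[1.0, 0.0, 0.0]])   # one-hot ground truth, shape [1, 3]
per_example = tf.nn.softmax_cross_entropy_with_logits_v2(labels=labels_example, logits=logits_example)
# per_example has shape [batch_size]: one cross-entropy value per example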
It is typically used to define the cross-entropy loss:
cross_entropy_loss=tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits = y, labels = y_))
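In a real training graph this loss is then handed to an optimizer. A minimal TF 1.x sketch (assuming y is the network output and y_ holds the one-hot labels, carried over from the line above):

# Sketch: wiring the loss into an optimizer (TF 1.x); `y` and `y_` are assumed to already exist
cross_entropy_loss = tf.reduce_mean(
    tf.nn.softmax_cross_entropy_with_logits_v2(logits=y, labels=y_))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy_loss)

Note that, unlike the older softmax_cross_entropy_with_logits, the _v2 variant also backpropagates into labels, so wrap computed label tensors in tf.stop_gradient if that is not desired. The complete example below compares the manual softmax + cross-entropy computation with the built-in op: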
import tensorflow as tf

# logits (three identical rows) and the corresponding one-hot labels
y = tf.constant([[1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0], [1.0, 2.0, 3.0, 4.0]])
y_ = tf.constant([[0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 1.0], [0.0, 0.0, 0.0, 1.0]])

# step 1: softmax; step 2: cross entropy computed manually as -sum(labels * log(softmax))
ysoft = tf.nn.softmax(y)
cross_entropy = -tf.reduce_sum(y_ * tf.log(ysoft))

# the same computation in a single step with the built-in op
cross_entropy2 = tf.reduce_sum(tf.nn.softmax_cross_entropy_with_logits_v2(logits=y, labels=y_))
cross_entropy_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=y, labels=y_))
with tf.Session() as sess:
    print("step1:softmax result=")
    print(sess.run(ysoft))
    print("step2:cross_entropy result=")
    print(sess.run(cross_entropy))
    print("Function(softmax_cross_entropy_with_logits) result=")
    print(sess.run(cross_entropy2))
    print("cross_entropy_loss result=")
    print(sess.run(cross_entropy_loss))

Output:
step1:softmax result=
[[ 0.0320586 0.08714432 0.23688284 0.64391428]
[ 0.0320586 0.08714432 0.23688284 0.64391428]
[ 0.0320586 0.08714432 0.23688284 0.64391428]]
step2:cross_entropy result=
1.32057
Function(softmax_cross_entropy_with_logits) result=
1.32057
cross_entropy_loss result=
0.44019
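Each row contributes -log(0.64391428) ≈ 0.4402, so the sum over the three rows is ≈ 1.32057 (cross_entropy and cross_entropy2 agree), while reduce_mean gives the per-example average ≈ 0.44019 (cross_entropy_loss). A quick NumPy check of these numbers (a sketch, independent of the TensorFlow code above):

import numpy as np

# Sketch: reproduce the printed values with NumPy
logits = np.array([[1.0, 2.0, 3.0, 4.0]] * 3)
labels = np.array([[0.0, 0.0, 0.0, 1.0]] * 3)
softmax = np.exp(logits) / np.exp(logits).sum(axis=1, keepdims=True)
per_row = -(labels * np.log(softmax)).sum(axis=1)  # each entry: -log(0.6439...) ~= 0.4402
print(per_row.sum())   # ~1.32057, matches cross_entropy and cross_entropy2
print(per_row.mean())  # ~0.44019, matches cross_entropy_loss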