1. binary_crossentropy（最终采用的方案）
# Final choice of loss: binary cross-entropy computed on the
# sigmoid-activated predictions, then averaged over every element
# to collapse the per-pixel losses into a single scalar.
bce_per_element = tf.keras.losses.binary_crossentropy(labels, pred)
loss = tf.reduce_mean(bce_per_element)
2. sigmoid_cross_entropy_with_logits（不行）
# # 第一种loss, sigmoid_cross_entropy_with_logits输入为未激活的值
# loss_h = tf.reduce_mean(
# tf.nn.sigmoid_cross_entropy_with_logits(logits=pred[:, :, :, 0:1], labels=labels[:, :, :, 0:1]))
# loss_s = tf.reduce_mean(
# tf.nn.sigmoid_cross_entropy_with_logits(logits=pred[:, :, :, 1:2], labels=labels[:, :, :, 1:2]))
# loss = loss_h + loss_s
3. sigmoid_cross_entropy（不行）
# # 第三种loss：tf.compat.v1.losses.sigmoid_cross_entropy，
# # 效果与上面第 2 节的 sigmoid_cross_entropy_with_logits 写法一模一样
# loss = tf.compat.v1.losses.sigmoid_cross_entropy(labels, pred)
文章出处登录后可见!
已经登录?立即刷新