From a7c225c89e4169492f0eef57c913463b976dd44e Mon Sep 17 00:00:00 2001 From: Yong Tang Date: Wed, 7 Feb 2018 16:24:34 +0000 Subject: [PATCH] Remove warnings in tf.losses.softmax_cross_entropy This fix tries to address the issue raised in #16534 where tf.losses.softmax_cross_entropy causes warnings due to its call to tf.nn.softmax_cross_entropy_with_logits. This fix switches to tf.nn.softmax_cross_entropy_with_logits_v2 to remove the warning. This fix fixes #16534. Signed-off-by: Yong Tang --- tensorflow/python/ops/losses/losses_impl.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tensorflow/python/ops/losses/losses_impl.py b/tensorflow/python/ops/losses/losses_impl.py index 8b3c61b..0907ea6 100644 --- a/tensorflow/python/ops/losses/losses_impl.py +++ b/tensorflow/python/ops/losses/losses_impl.py @@ -726,9 +726,12 @@ def softmax_cross_entropy( smooth_negatives = label_smoothing / num_classes onehot_labels = onehot_labels * smooth_positives + smooth_negatives - losses = nn.softmax_cross_entropy_with_logits(labels=onehot_labels, - logits=logits, - name="xentropy") + onehot_labels = array_ops.stop_gradient( + onehot_labels, name="labels_stop_gradient") + losses = nn.softmax_cross_entropy_with_logits_v2( + labels=onehot_labels, logits=logits, name="xentropy") + + return compute_weighted_loss( losses, weights, scope, loss_collection, reduction=reduction) -- 2.7.4