From: A. Unique TensorFlower
Date: Mon, 5 Feb 2018 23:30:54 +0000 (-0800)
Subject: Assign total_loss in order not to crash if training loop exits early.
X-Git-Tag: upstream/v1.7.0~31^2~993
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=395550bc423f6a5d9c96233f34f21fce6bd23b4e;p=platform%2Fupstream%2Ftensorflow.git

Assign total_loss in order not to crash if training loop exits early.

PiperOrigin-RevId: 184596877
---

diff --git a/tensorflow/contrib/slim/python/slim/learning.py b/tensorflow/contrib/slim/python/slim/learning.py
index 54362c8..83f3380 100644
--- a/tensorflow/contrib/slim/python/slim/learning.py
+++ b/tensorflow/contrib/slim/python/slim/learning.py
@@ -738,6 +738,7 @@ def train(train_op,
     if summary_writer is not None:
       train_step_kwargs['summary_writer'] = sv.summary_writer
 
+    total_loss = 0
     should_retry = True
     while should_retry:
       try:
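
For context, the change guards against the case where train() returns total_loss
after a retry loop that may exit before any training step has assigned it.
Below is a minimal, self-contained Python sketch of that failure mode and the
fix; the helper name train_sketch and the should_stop_immediately flag are
hypothetical simplifications, not the supervisor-driven loop from learning.py.

    # Minimal sketch (not the real slim code): why total_loss needs a default.
    def train_sketch(max_retries=3, should_stop_immediately=False):
        total_loss = 0  # Without this default, an early exit below leaves the
                        # variable unbound and the final return raises
                        # UnboundLocalError.
        should_retry = True
        retries = 0
        while should_retry:
            should_retry = False
            try:
                if should_stop_immediately:
                    # Mirrors a loop that exits before a single training step
                    # assigns total_loss (e.g. stop was requested up front).
                    break
                total_loss = 0.42  # Stand-in for the loss of the last step.
            except Exception:
                retries += 1
                should_retry = retries < max_retries
        return total_loss

    if __name__ == "__main__":
        # Exits before any training step runs; returns 0 instead of crashing.
        print(train_sketch(should_stop_immediately=True))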