From d744b314682d2313bd3e8ffe0b34e022cbeacb7b Mon Sep 17 00:00:00 2001
From: Yong Tang
Date: Sun, 1 Apr 2018 02:08:53 +0000
Subject: [PATCH] Fix pylint issue

Signed-off-by: Yong Tang
---
 tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
index 867e49b..a0f5741 100644
--- a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
+++ b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py
@@ -472,7 +472,8 @@ def _bahdanau_score(processed_query, keys, normalize):
     # Scalar used in weight normalization
     g = variable_scope.get_variable(
         "attention_g", dtype=dtype,
-        initializer=init_ops.constant_initializer(math.sqrt((1. / num_units))), shape=())
+        initializer=init_ops.constant_initializer(math.sqrt((1. / num_units))),
+        shape=())
     # Bias added prior to the nonlinearity
     b = variable_scope.get_variable(
         "attention_b", [num_units], dtype=dtype,
--
2.7.4
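
For context, the lines this patch rewraps sit inside `_bahdanau_score`, which computes the weight-normalized variant of Bahdanau attention: the attention vector v is rescaled to g * v / ||v|| before the tanh score is reduced over the feature axis. Below is a minimal NumPy sketch of that computation under stated assumptions: the function name `bahdanau_score` and its parameters are illustrative stand-ins, not the TensorFlow API, and shapes are assumed to follow the library's convention of [batch, num_units] queries and [batch, max_time, num_units] keys.

import numpy as np

def bahdanau_score(processed_query, keys, v, g, b):
  # Illustrative sketch, not the TensorFlow implementation.
  # processed_query: [batch, num_units]; keys: [batch, max_time, num_units]
  # v: [num_units] attention vector; b: [num_units] bias added before tanh
  # g: scalar initialized to math.sqrt(1. / num_units) -- the
  #    constant_initializer argument that the patch above rewraps
  processed_query = processed_query[:, np.newaxis, :]  # broadcast over time
  # Weight normalization: scale v to unit norm, then by the learned scalar g.
  normed_v = g * v / np.sqrt(np.sum(np.square(v)))
  # Resulting score shape: [batch, max_time]
  return np.sum(normed_v * np.tanh(keys + processed_query + b), axis=2)

Initializing g to sqrt(1. / num_units) keeps the norm of normed_v comparable to that of a freshly initialized v, following the weight-normalization reparameterization of Salimans & Kingma (2016) that the surrounding code applies when normalize=True.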