Fix pylint line-too-long issue in attention_wrapper.py
author    Yong Tang <yong.tang.github@outlook.com>
          Sun, 1 Apr 2018 02:08:53 +0000
committer Yong Tang <yong.tang.github@outlook.com>
          Mon, 16 Apr 2018 20:04:11 +0000
Signed-off-by: Yong Tang <yong.tang.github@outlook.com>
tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py

index 867e49b..a0f5741 100644
@@ -472,7 +472,8 @@ def _bahdanau_score(processed_query, keys, normalize):
     # Scalar used in weight normalization
     g = variable_scope.get_variable(
         "attention_g", dtype=dtype,
-        initializer=init_ops.constant_initializer(math.sqrt((1. / num_units))), shape=())
+        initializer=init_ops.constant_initializer(math.sqrt((1. / num_units))),
+        shape=())
     # Bias added prior to the nonlinearity
     b = variable_scope.get_variable(
         "attention_b", [num_units], dtype=dtype,