From 283f03c825312efd3319cb37dc1a412288a536ec Mon Sep 17 00:00:00 2001 From: ImSheridan Date: Tue, 6 Feb 2018 08:15:33 +0800 Subject: [PATCH] Fix the description of the output shape, which should be [batch_size, 1, max_time] (#16642) --- tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py index d6b5ece..0a53fd6 100644 --- a/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py +++ b/tensorflow/contrib/seq2seq/python/ops/attention_wrapper.py @@ -331,7 +331,7 @@ def _luong_score(query, keys, scale): # batched matmul on: # [batch_size, 1, depth] . [batch_size, depth, max_time] # resulting in an output shape of: - # [batch_time, 1, max_time]. + # [batch_size, 1, max_time]. # we then squeeze out the center singleton dimension. score = math_ops.matmul(query, keys, transpose_b=True) score = array_ops.squeeze(score, [1]) -- 2.7.4