Small fix so that GDN can run on TPU
author A. Unique TensorFlower <gardener@tensorflow.org>
Thu, 24 May 2018 21:59:05 +0000 (14:59 -0700)
committer TensorFlower Gardener <gardener@tensorflow.org>
Thu, 24 May 2018 22:03:39 +0000 (15:03 -0700)
PiperOrigin-RevId: 197959536

tensorflow/contrib/layers/python/layers/layers.py

index 25c3b1e..f708da6 100644
@@ -1890,6 +1890,7 @@ class GDN(base.Layer):
 
     def beta_initializer(shape, dtype=None, partition_info=None):
       del partition_info  # unused
+      pedestal = array_ops.constant(self._reparam_offset**2, dtype=self.dtype)
       return math_ops.sqrt(array_ops.ones(shape, dtype=dtype) + pedestal)
 
     def gamma_initializer(shape, dtype=None, partition_info=None):
@@ -1897,6 +1898,7 @@ class GDN(base.Layer):
       assert len(shape) == 2
       assert shape[0] == shape[1]
       eye = linalg_ops.eye(shape[0], dtype=dtype)
+      pedestal = array_ops.constant(self._reparam_offset**2, dtype=self.dtype)
       return math_ops.sqrt(self._gamma_init * eye + pedestal)
 
     beta = self.add_variable(
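
The diff creates the `pedestal` constant inside each initializer closure, so the constant is built at initializer call time, in the same graph/device context as the variable-initialization ops that consume it (which is what allows the layer to initialize cleanly for TPU execution). Below is a minimal, hypothetical sketch of that pattern using the public TensorFlow API; `reparam_offset` and the standalone `beta_initializer` are illustrative stand-ins for the layer's internals, not the actual GDN implementation.

# Minimal sketch (not the actual GDN code): the key point of the fix is that
# the pedestal constant is created inside the initializer rather than being
# captured from an outer scope whose graph may differ from the variable's.
import tensorflow as tf  # assumes a TF 1.x-style API is available

reparam_offset = 2 ** -18  # small reparameterization offset; the contrib GDN layer defaults to 2**-18

def beta_initializer(shape, dtype=None, partition_info=None):
  del partition_info  # unused
  # Built here so it lands in the same graph/device context as the
  # initialization ops that use it.
  pedestal = tf.constant(reparam_offset ** 2, dtype=dtype)
  return tf.sqrt(tf.ones(shape, dtype=dtype) + pedestal)

# Example: evaluating the initializer directly for a 3-channel beta.
beta_init = beta_initializer([3], dtype=tf.float32)

As the diff shows, the same constant construction is repeated in gamma_initializer, where the pedestal is added to self._gamma_init * eye before taking the square root.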