self.assertAllClose(y, ordered.forward(x).eval())
self.assertAllClose(x, ordered.inverse(y).eval())
self.assertAllClose(
- -np.sum(y[..., 1:], axis=-1),
+ np.sum(np.asarray(y)[..., 1:], axis=-1),
ordered.inverse_log_det_jacobian(y, event_ndims=1).eval(),
atol=0.,
rtol=1e-7)
self.assertAllClose(real_x, ordered.inverse(y).eval(
feed_dict={y: real_y}))
self.assertAllClose(
- -np.sum(y[..., 1:], axis=-1),
+ np.sum(np.asarray(real_y)[..., 1:], axis=-1),
ordered.inverse_log_det_jacobian(y, event_ndims=1).eval(
feed_dict={y: real_y}),
atol=0.,
def testBijectiveAndFinite(self):
  """Checks Ordered is bijective and finite on sample points.

  `x` must lie in the bijector's domain (vectors increasing along the
  last axis), so random draws are sorted before use; `y` lives in the
  unconstrained codomain, so arbitrary reals suffice.
  """
  with self.test_session():
    ordered = Ordered()
    # Domain points: sort along the event (last) axis to satisfy the
    # increasing-entries constraint.
    x = np.sort(rng.randn(3, 10), axis=-1).astype(np.float32)
    # Codomain points: unconstrained reals.
    y = (rng.randn(3, 10)).astype(np.float32)
    assert_bijective_and_finite(ordered, x, y, event_ndims=1)
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
-from tensorflow.python.ops import nn_ops
from tensorflow.python.ops.distributions import bijector
"""Bijector which maps a tensor x_k that has increasing elements in the last
dimension to an unconstrained tensor y_k.
The inverse of the bijector applied to a normal random vector `y ~ N(0, 1)`
gives back a sorted random vector with the same distribution `x ~ N(0, 1)`
where `x = sort(y)`
On the last dimension of the tensor, Ordered bijector performs:
`y[0] = x[0]`
`y[1:] = math_ops.log(x[1:] - x[:-1])`
Example Use:
```python
bijector.Ordered().forward([2, 3, 4])
# Result: [2., 0., 0.]

bijector.Ordered().inverse([0.06428002, -1.07774478, -0.71530371])
# Result: [0.06428002, 0.40464228, 0.8936858]
```
"""
return math_ops.reduce_sum(y[..., 1:], axis=-1)
def _forward_log_det_jacobian(self, x):
  """Returns log|det J(x)| of the forward map, reduced over the last axis.

  The forward map sends an increasing vector `x` to
  `y[0] = x[0]`, `y[k] = log(x[k] - x[k-1])` for `k >= 1`.  Its Jacobian
  is triangular with diagonal `(1, 1/(x[1]-x[0]), ...)`, so
  `log|det J| = -sum_k log(x[k] - x[k-1])`.

  Args:
    x: Tensor whose last dimension holds the (increasing) event.

  Returns:
    Tensor of log-determinants, shape of `x` with the last axis reduced.
  """
  # Differences of adjacent entries are positive on the valid domain,
  # so the log is finite there.
  return -math_ops.reduce_sum(
      math_ops.log(x[..., 1:] - x[..., :-1]),
      axis=-1)
def _maybe_assert_valid_x(self, x):
if not self.validate_args: