# ==============================================================================
"""Functions for specifying custom gradients.
-See ${python/contrib.bayesflow.custom_gradient}.
+See @{tf.contrib.bayesflow.custom_grad.custom_gradient}.
"""
from __future__ import absolute_import
# ==============================================================================
"""Probabilistic neural layers.
-See ${python/contrib.bayesflow.layers}.
+See @{tf.contrib.bayesflow.layers}.
"""
from __future__ import absolute_import
# ==============================================================================
"""Probabilistic optimizer modules.
-See ${python/contrib.bayesflow.optimizers}.
+See @{tf.contrib.bayesflow.optimizers}.
"""
from __future__ import absolute_import
def add_metrics(estimator, metric_fn):
- """Creates a new ${tf.estimator.Estimator} which has given metrics.
+ """Creates a new @{tf.estimator.Estimator} which has given metrics.
Example:
```
Args:
- estimator: A ${tf.estimator.Estimator} object.
+ estimator: A @{tf.estimator.Estimator} object.
metric_fn: A function which should obey the following signature:
- Args: can only have the following four arguments in any order:
* predictions: Predictions `Tensor` or dict of `Tensor` created by given
function, namely a `(metric_tensor, update_op)` tuple.
Returns:
- A new ${tf.estimator.Estimator} which has a union of original metrics with
+ A new @{tf.estimator.Estimator} which has a union of original metrics with
given ones.
"""
_verify_metric_fn_args(metric_fn)
```
Args:
- estimator: A ${tf.estimator.Estimator} object.
+ estimator: A @{tf.estimator.Estimator} object.
keys: a `string` or a `list` of `string`. If it is `None`, all of the
`features` in `dict` are forwarded to the `predictions`. If it is a
`string`, only the given key is forwarded. If it is a `list` of strings,
all the given `keys` are forwarded.
Returns:
- A new ${tf.estimator.Estimator} which forwards features to predictions.
+ A new @{tf.estimator.Estimator} which forwards features to predictions.
Raises:
ValueError:
`InputFnOps`.
default_output_alternative_key: the name of the head to serve when an
incoming serving request does not explicitly request a specific head.
- Must be `None` if the estimator inherits from ${tf.estimator.Estimator}
+ Must be `None` if the estimator inherits from @{tf.estimator.Estimator}
or for single-headed models.
assets_extra: A dict specifying how to populate the assets.extra directory
within the exported SavedModel. Each key should give the destination
The string path to the exported directory.
Raises:
- ValueError: If `estimator` is a ${tf.estimator.Estimator} instance
+ ValueError: If `estimator` is a @{tf.estimator.Estimator} instance
and `default_output_alternative_key` was specified.
"""
if isinstance(estimator, core_estimator.Estimator):
that must be provided at serving time (excluding labels!).
default_output_alternative_key: the name of the head to serve when an
incoming serving request does not explicitly request a specific head.
- Must be `None` if the estimator inherits from ${tf.estimator.Estimator}
+ Must be `None` if the estimator inherits from @{tf.estimator.Estimator}
or for single-headed models.
assets_extra: A dict specifying how to populate the assets.extra directory
within the exported SavedModel. Each key should give the destination
The string path to the SavedModel indicated by post_export_fn.
Raises:
- ValueError: If `estimator` is a ${tf.estimator.Estimator} instance
+ ValueError: If `estimator` is a @{tf.estimator.Estimator} instance
and `default_output_alternative_key` was specified or if post_export_fn
does not return a valid directory.
RuntimeError: If unable to create temporary or final export directory.
cores. This is required by model-parallelism which enables partitioning
the model to multiple cores. For example, [2, 2, 1] means the model is
partitioned across 4 cores which span two cores in both x and y
- coordinates. Please refer to ${tf.contrib.tpu.TopologyProto} for the
+ coordinates. Please refer to @{tf.contrib.tpu.Topology} for the
geometry of a TPU mesh.
per_host_input_for_training: If `True`, `input_fn` is invoked Per-Host
rather than Per-Core. With Per-Host input pipeline deployment, `input_fn`
"""Creates the global step tensor in graph.
The global step tensor must be an integer type with name 'global_step' and
- be added to the collection ${tf.GraphKeys.GLOBAL_STEP}.
+ be added to the collection @{tf.GraphKeys.GLOBAL_STEP}.
Args:
graph: The graph in which to create the global step tensor.
FeatureColumns provide a high level abstraction for ingesting and representing
features. FeatureColumns are also the primary way of encoding features for
-canned ${tf.estimator.Estimator}s.
+canned @{tf.estimator.Estimator}s.
When using FeatureColumns with `Estimators`, the type of feature column you
should choose depends on (1) the feature type and (2) the model type.
It is used to get the parsing spec for `tf.parse_example`. Returned spec is a
dict from keys ('string') to `VarLenFeature`, `FixedLenFeature`, and other
- supported objects. Please check documentation of ${tf.parse_example} for all
+ supported objects. Please check documentation of @{tf.parse_example} for all
supported spec objects.
Let's say a Feature column depends on raw feature ('raw') and another
weight_collections: List of graph collections to which Variables (if any
will be created) are added.
trainable: If `True` also add variables to the graph collection
- `GraphKeys.TRAINABLE_VARIABLES` (see ${tf.Variable}).
+ `GraphKeys.TRAINABLE_VARIABLES` (see @{tf.Variable}).
Returns:
`Tensor` of shape [batch_size] + `_variable_shape`.
WARNING: Do not subclass this layer unless you know what you are doing:
the API is subject to future changes.
- A categorical feature typically handled with a ${tf.SparseTensor} of IDs.
+ A categorical feature typically handled with a @{tf.SparseTensor} of IDs.
"""
__metaclass__ = abc.ABCMeta
weight_collections: List of graph collections to which variables (if any
will be created) are added.
trainable: If `True` also add variables to the graph collection
- `GraphKeys.TRAINABLE_VARIABLES` (see ${tf.get_variable}).
+ `GraphKeys.TRAINABLE_VARIABLES` (see @{tf.get_variable}).
"""
pass
"""Creates a slice helper object given a variable.
This allows creating a sub-tensor from part of the current contents
- of a variable. See ${tf.Tensor$`Tensor.__getitem__`}
- for detailed examples of slicing.
+ of a variable. See @{tf.Tensor.__getitem__} for detailed examples
+ of slicing.
This function in addition also allows assignment to a sliced range.
This is similar to `__setitem__` functionality in Python. However,
class ResourceVariable(variables.Variable):
"""Variable based on resource handles.
- See the ${variables} documentation for more details.
+ See the @{$python/state_ops$`Variables`} documentation for more details.
A `ResourceVariable` allows you to maintain state across subsequent calls to
session.run.
"""A training helper that checkpoints models and computes summaries.
This class is deprecated. Please use
- ${tf.train.MonitoredTrainingSession} instead.
+ @{tf.train.MonitoredTrainingSession} instead.
The Supervisor is a small wrapper around a `Coordinator`, a `Saver`,
and a `SessionManager` that takes care of common needs of TensorFlow