from tensorflow.python.util import compat
from tensorflow.python.util import compat_internal
from tensorflow.python.util import nest
+from tensorflow.python.util.tf_export import tf_export
_VALID_MODEL_FN_ARGS = set(
['features', 'labels', 'mode', 'params', 'self', 'config'])
+@tf_export('estimator.Estimator')
class Estimator(object):
"""Estimator class to train and evaluate TensorFlow models.
def _assert_members_are_not_overridden(self):
"""Asserts members of `Estimator` are not overridden."""
- allowed_overrides = set(['_call_input_fn', '_create_global_step',
- '_convert_train_steps_to_hooks',
- '_convert_eval_steps_to_hooks'])
+ allowed_overrides = set([
+ '_call_input_fn', '_create_global_step',
+ '_convert_train_steps_to_hooks', '_convert_eval_steps_to_hooks',
+ '_tf_api_names'
+ ])
estimator_members = set([m for m in Estimator.__dict__.keys()
if not m.startswith('__')])
subclass_members = set(self.__class__.__dict__.keys())
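For context: `tf_export` registers the decorated symbol under the given path in the public `tf.*` namespace, and it does so by stamping the export names onto the object itself, which is exactly why `_tf_api_names` joins the override whitelist above. A minimal sketch of the effect (the attribute name comes from this diff; the real decorator does more bookkeeping):

    # Simplified sketch of what the decorator does, not the real implementation.
    def tf_export_sketch(*names):
        def decorator(obj):
            # Subclasses of Estimator inherit this attribute, so the
            # override assertion has to treat it as allowed.
            obj._tf_api_names = names
            return obj
        return decorator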
from tensorflow.python.framework import errors_impl
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('estimator.Exporter')
class Exporter(object):
"""A class representing a type of model export."""
return export_result
+@tf_export('estimator.FinalExporter')
class FinalExporter(Exporter):
"""This class exports the serving graph and checkpoints in the end.
is_the_final_export)
+@tf_export('estimator.LatestExporter')
class LatestExporter(Exporter):
"""This class regularly exports the serving graph and checkpoints.
from tensorflow.python.training import monitored_session
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import nest
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('estimator.ModeKeys')
class ModeKeys(object):
"""Standard names for model modes.
AVERAGE_LOSS_METRIC_KEY = 'average_loss'
+@tf_export('estimator.EstimatorSpec')
class EstimatorSpec(
collections.namedtuple('EstimatorSpec', [
'mode', 'predictions', 'loss', 'train_op', 'eval_metric_ops',
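With `EstimatorSpec` and `ModeKeys` public, a `model_fn` can be written entirely against `tf.estimator`. A minimal sketch (the 'x' feature key, layer sizes, and optimizer are illustrative):

    import tensorflow as tf

    def model_fn(features, labels, mode, params):
        logits = tf.layers.dense(features['x'], units=params['n_classes'])
        if mode == tf.estimator.ModeKeys.PREDICT:
            return tf.estimator.EstimatorSpec(
                mode, predictions={'class_ids': tf.argmax(logits, axis=1)})
        loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)
        if mode == tf.estimator.ModeKeys.TRAIN:
            train_op = tf.train.GradientDescentOptimizer(0.01).minimize(
                loss, global_step=tf.train.get_global_step())
            return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
        return tf.estimator.EstimatorSpec(mode, loss=loss)  # EVAL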
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import server_lib
from tensorflow.python.util import compat_internal
+from tensorflow.python.util.tf_export import tf_export
_USE_DEFAULT = object()
EVALUATOR = 'evaluator'
+@tf_export('estimator.RunConfig')
class RunConfig(object):
"""This class specifies the configurations for an `Estimator` run."""
from tensorflow.python.training import server_lib
from tensorflow.python.training import session_run_hook
from tensorflow.python.util import compat
+from tensorflow.python.util.tf_export import tf_export
_MAX_DELAY_SECS = 60
_DELAY_SECS_PER_WORKER = 5
return tf_config.get(_ENVIRONMENT_KEY) == _ENVIRONMENT_GOOGLE_VALUE
+@tf_export('estimator.TrainSpec')
class TrainSpec(
collections.namedtuple('TrainSpec', ['input_fn', 'max_steps', 'hooks'])):
"""Configuration for the "train" part for the `train_and_evaluate` call.
cls, input_fn=input_fn, max_steps=max_steps, hooks=hooks)
+@tf_export('estimator.EvalSpec')
class EvalSpec(
collections.namedtuple('EvalSpec', [
'input_fn', 'steps', 'name', 'hooks', 'exporters', 'start_delay_secs',
throttle_secs=throttle_secs)
+@tf_export('estimator.train_and_evaluate')
def train_and_evaluate(estimator, train_spec, eval_spec):
"""Train and evaluate the `estimator`.
from tensorflow.python.training import checkpoint_ops
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import saver
+from tensorflow.python.util.tf_export import tf_export
+@tf_export("estimator.VocabInfo")
class VocabInfo(
collections.namedtuple("VocabInfo", [
"new_vocab",
)
+@tf_export("estimator.WarmStartSettings")
class WarmStartSettings(
collections.namedtuple("WarmStartSettings", [
"ckpt_to_initialize_from",
import numpy as np
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('keras.datasets.boston_housing.load_data')
def load_data(path='boston_housing.npz', test_split=0.2, seed=113):
"""Loads the Boston Housing dataset.
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras.datasets.cifar import load_batch
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('keras.datasets.cifar10.load_data')
def load_data():
"""Loads CIFAR10 dataset.
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras.datasets.cifar import load_batch
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('keras.datasets.cifar100.load_data')
def load_data(label_mode='fine'):
"""Loads CIFAR100 dataset.
from tensorflow.python.keras._impl.keras.preprocessing.sequence import _remove_long_seq
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('keras.datasets.imdb.load_data')
def load_data(path='imdb.npz',
num_words=None,
skip_top=0,
return (x_train, y_train), (x_test, y_test)
+@tf_export('keras.datasets.imdb.get_word_index')
def get_word_index(path='imdb_word_index.json'):
"""Retrieves the dictionary mapping word indices back to words.
import numpy as np
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('keras.datasets.mnist.load_data')
def load_data(path='mnist.npz'):
"""Loads the MNIST dataset.
from tensorflow.python.keras._impl.keras.preprocessing.sequence import _remove_long_seq
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
from tensorflow.python.platform import tf_logging as logging
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('keras.datasets.reuters.load_data')
def load_data(path='reuters.npz',
num_words=None,
skip_top=0,
return (x_train, y_train), (x_test, y_test)
+@tf_export('keras.datasets.reuters.get_word_index')
def get_word_index(path='reuters_word_index.json'):
"""Retrieves the dictionary mapping word indices back to words.
from tensorflow.python.ops import variables as tf_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('layers.Layer')
class Layer(object):
"""Base layer class.
', found shape=' + str(shape))
+@tf_export('keras.layers.InputSpec', 'layers.InputSpec')
class InputSpec(object):
"""Specifies the ndim, dtype and shape of every input to a layer.
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import nn_ops
+from tensorflow.python.util.tf_export import tf_export
class _Conv(base.Layer):
new_space)
+@tf_export('layers.Conv1D')
class Conv1D(_Conv):
"""1D convolution layer (e.g. temporal convolution).
name=name, **kwargs)
+@tf_export('layers.conv1d')
def conv1d(inputs,
filters,
kernel_size,
return layer.apply(inputs)
+@tf_export('layers.Conv2D')
class Conv2D(_Conv):
"""2D convolution layer (e.g. spatial convolution over images).
name=name, **kwargs)
+@tf_export('layers.conv2d')
def conv2d(inputs,
filters,
kernel_size,
return layer.apply(inputs)
+@tf_export('layers.Conv3D')
class Conv3D(_Conv):
"""3D convolution layer (e.g. spatial convolution over volumes).
name=name, **kwargs)
+@tf_export('layers.conv3d')
def conv3d(inputs,
filters,
kernel_size,
raise NotImplementedError
+@tf_export('layers.SeparableConv1D')
class SeparableConv1D(_SeparableConv):
"""Depthwise separable 1D convolution.
return outputs
+@tf_export('layers.SeparableConv2D')
class SeparableConv2D(_SeparableConv):
"""Depthwise separable 2D convolution.
return outputs
+@tf_export('layers.separable_conv1d')
def separable_conv1d(inputs,
filters,
kernel_size,
return layer.apply(inputs)
+@tf_export('layers.separable_conv2d')
def separable_conv2d(inputs,
filters,
kernel_size,
return layer.apply(inputs)
+@tf_export('layers.Conv2DTranspose')
class Conv2DTranspose(Conv2D):
"""Transposed 2D convolution layer (sometimes called 2D Deconvolution).
return tensor_shape.TensorShape(output_shape)
+@tf_export('layers.conv2d_transpose')
def conv2d_transpose(inputs,
filters,
kernel_size,
return layer.apply(inputs)
+@tf_export('layers.Conv3DTranspose')
class Conv3DTranspose(Conv3D):
"""Transposed 3D convolution layer (sometimes called 3D Deconvolution).
return tensor_shape.TensorShape(output_shape)
+@tf_export('layers.conv3d_transpose')
def conv3d_transpose(inputs,
filters,
kernel_size,
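Functional conv usage sketch (the input shape and hyperparameters are illustrative):

    import tensorflow as tf

    images = tf.placeholder(tf.float32, [None, 28, 28, 1])
    conv = tf.layers.conv2d(images, filters=32, kernel_size=3,
                            padding='same', activation=tf.nn.relu)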
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import standard_ops
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('layers.Dense')
class Dense(base.Layer):
"""Densely-connected layer class.
return input_shape[:-1].concatenate(self.units)
+@tf_export('layers.dense')
def dense(
inputs, units,
activation=None,
return layer.apply(inputs)
+@tf_export('layers.Dropout')
class Dropout(base.Layer):
"""Applies Dropout to the input.
return input_shape
+@tf_export('layers.dropout')
def dropout(inputs,
rate=0.5,
noise_shape=None,
return layer.apply(inputs, training=training)
+@tf_export('layers.Flatten')
class Flatten(base.Layer):
"""Flattens an input tensor while preserving the batch axis (axis 0).
return tensor_shape.TensorShape(output_shape)
+@tf_export('layers.flatten')
def flatten(inputs, name=None):
"""Flattens an input tensor while preserving the batch axis (axis 0).
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
+from tensorflow.python.util.tf_export import tf_export
class InputLayer(base.Layer):
output_tensors=[input_tensor])
+@tf_export('layers.Input')
def Input( # pylint: disable=invalid-name
shape=None,
batch_size=None,
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.training import moving_averages
+from tensorflow.python.util.tf_export import tf_export
+@tf_export('layers.BatchNormalization')
class BatchNormalization(base.Layer):
"""Batch Normalization layer from http://arxiv.org/abs/1502.03167.
return input_shape
+@tf_export('layers.batch_normalization')
def batch_normalization(inputs,
axis=-1,
momentum=0.99,
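Batch norm sketch; the `UPDATE_OPS` coupling is standard TF 1.x behavior, while `optimizer` and `loss` are assumed from context:

    h = tf.layers.batch_normalization(h, training=is_training)
    # Moving-mean/variance updates land in tf.GraphKeys.UPDATE_OPS and must be
    # run alongside the train op:
    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    with tf.control_dependencies(update_ops):
        train_op = optimizer.minimize(loss)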
from tensorflow.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import nn
+from tensorflow.python.util.tf_export import tf_export
class _Pooling1D(base.Layer):
return tensor_shape.TensorShape([input_shape[0], length, input_shape[2]])
+@tf_export('layers.AveragePooling1D')
class AveragePooling1D(_Pooling1D):
"""Average Pooling layer for 1D inputs.
**kwargs)
+@tf_export('layers.average_pooling1d')
def average_pooling1d(inputs, pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
return layer.apply(inputs)
+@tf_export('layers.MaxPooling1D')
class MaxPooling1D(_Pooling1D):
"""Max Pooling layer for 1D inputs.
**kwargs)
+@tf_export('layers.max_pooling1d')
def max_pooling1d(inputs, pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
[input_shape[0], rows, cols, input_shape[3]])
+@tf_export('layers.AveragePooling2D')
class AveragePooling2D(_Pooling2D):
"""Average pooling layer for 2D inputs (e.g. images).
padding=padding, data_format=data_format, name=name, **kwargs)
+@tf_export('layers.average_pooling2d')
def average_pooling2d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
return layer.apply(inputs)
+@tf_export('layers.MaxPooling2D')
class MaxPooling2D(_Pooling2D):
"""Max pooling layer for 2D inputs (e.g. images).
padding=padding, data_format=data_format, name=name, **kwargs)
+@tf_export('layers.max_pooling2d')
def max_pooling2d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
[input_shape[0], len_dim1, len_dim2, len_dim3, input_shape[4]])
+@tf_export('layers.AveragePooling3D')
class AveragePooling3D(_Pooling3D):
"""Average pooling layer for 3D inputs (e.g. volumes).
padding=padding, data_format=data_format, name=name, **kwargs)
+@tf_export('layers.average_pooling3d')
def average_pooling3d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
return layer.apply(inputs)
+@tf_export('layers.MaxPooling3D')
class MaxPooling3D(_Pooling3D):
"""Max pooling layer for 3D inputs (e.g. volumes).
padding=padding, data_format=data_format, name=name, **kwargs)
+@tf_export('layers.max_pooling3d')
def max_pooling3d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
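Pooling sketch, reusing `conv` from the earlier example (pool size and strides are illustrative):

    pooled = tf.layers.max_pooling2d(conv, pool_size=2, strides=2)
    averaged = tf.layers.average_pooling2d(conv, pool_size=2, strides=2)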
"api/keras/utils/__init__.py",
"api/keras/wrappers/__init__.py",
"api/keras/wrappers/scikit_learn/__init__.py",
+ "api/layers/__init__.py",
"api/linalg/__init__.py",
"api/logging/__init__.py",
"api/losses/__init__.py",