From ddf9536dfeb358237219e27c185c729fd4c8537b Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower"
Date: Wed, 7 Feb 2018 17:16:32 -0800
Subject: [PATCH] Remove note about accumulator variables, since those are not
 added to TRAINABLE_VARIABLES.

PiperOrigin-RevId: 184922273
---
 tensorflow/python/estimator/warm_starting_util.py | 12 +++---------
 1 file changed, 3 insertions(+), 9 deletions(-)

diff --git a/tensorflow/python/estimator/warm_starting_util.py b/tensorflow/python/estimator/warm_starting_util.py
index 48110ef..57db968 100644
--- a/tensorflow/python/estimator/warm_starting_util.py
+++ b/tensorflow/python/estimator/warm_starting_util.py
@@ -117,21 +117,13 @@ class WarmStartSettings(
     ws = WarmStartSettings(ckpt_to_initialize_from="/tmp/model-1000")
     ```
 
-  Warm-start only the embeddings (input layer) and their accumulator variables:
+  Warm-start only the embeddings (input layer):
 
     ```
     ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
                            vars_to_warm_start=".*input_layer.*")
     ```
 
-  Warm-start everything except the optimizer accumulator variables
-  (DNN defaults to Adagrad):
-
-    ```
-    ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
-                           vars_to_warm_start="^(?!.*(Adagrad))")
-    ```
-
   Warm-start all weights but the embedding parameters corresponding to
   `sc_vocab_file` have a different vocab from the one used in the current
   model:
@@ -423,6 +415,8 @@ def _warm_start(warm_start_settings):
   # Both warm_start_settings.vars_to_warm_start = '.*' and
   # warm_start_settings.vars_to_warm_start = None will match everything here.
   for v in ops.get_collection(
+      # TODO(eddz): Allow for different collections here (to support
+      # warm-starting accumulators).
      ops.GraphKeys.TRAINABLE_VARIABLES,
      scope=warm_start_settings.vars_to_warm_start):
    if not isinstance(v, list):
-- 
2.7.4
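
For context on the removed Adagrad example, here is a minimal sketch (assuming a TF 1.x graph-mode program; the variable name "dense/kernel" and the use of tf.train.AdagradOptimizer are illustrative, not taken from the patch) of why accumulator variables never appear in TRAINABLE_VARIABLES: optimizer slot variables are created with trainable=False, so a vars_to_warm_start pattern matched against that collection has nothing to exclude.

```
import tensorflow as tf

# Illustrative TF 1.x sketch (not from the patch): build one trainable
# variable and let Adagrad create its accumulator slot for it.
w = tf.get_variable("dense/kernel", shape=[4, 2])
loss = tf.reduce_sum(tf.square(w))
train_op = tf.train.AdagradOptimizer(0.1).minimize(loss)

trainable = {v.name for v in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)}
global_vars = {v.name for v in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)}

# The accumulator (e.g. "dense/kernel/Adagrad:0") shows up only in the global
# set, which is why warm-starting over TRAINABLE_VARIABLES cannot reach it.
print(sorted(global_vars - trainable))
```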