From: A. Unique TensorFlower
Date: Thu, 8 Feb 2018 01:16:32 +0000 (-0800)
Subject: Remove note about accumulator variables, since those are not added to TRAINABLE_VARIA...
X-Git-Tag: upstream/v1.7.0~31^2~902
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=ddf9536dfeb358237219e27c185c729fd4c8537b;p=platform%2Fupstream%2Ftensorflow.git

Remove note about accumulator variables, since those are not added to TRAINABLE_VARIABLES.

PiperOrigin-RevId: 184922273
---

diff --git a/tensorflow/python/estimator/warm_starting_util.py b/tensorflow/python/estimator/warm_starting_util.py
index 48110ef..57db968 100644
--- a/tensorflow/python/estimator/warm_starting_util.py
+++ b/tensorflow/python/estimator/warm_starting_util.py
@@ -117,21 +117,13 @@ class WarmStartSettings(
   ws = WarmStartSettings(ckpt_to_initialize_from="/tmp/model-1000")
   ```
 
-  Warm-start only the embeddings (input layer) and their accumulator variables:
+  Warm-start only the embeddings (input layer):
 
   ```
   ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
                          vars_to_warm_start=".*input_layer.*")
   ```
 
-  Warm-start everything except the optimizer accumulator variables
-  (DNN defaults to Adagrad):
-
-  ```
-  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
-                         vars_to_warm_start="^(?!.*(Adagrad))")
-  ```
-
   Warm-start all weights but the embedding parameters corresponding to
   `sc_vocab_file` have a different vocab from the one used in the current
   model:
@@ -423,6 +415,8 @@ def _warm_start(warm_start_settings):
   # Both warm_start_settings.vars_to_warm_start = '.*' and
   # warm_start_settings.vars_to_warm_start = None will match everything here.
   for v in ops.get_collection(
+      # TODO(eddz): Allow for different collections here (to support
+      # warm-starting accumulators).
       ops.GraphKeys.TRAINABLE_VARIABLES,
       scope=warm_start_settings.vars_to_warm_start):
     if not isinstance(v, list):
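
Context for readers of this change: optimizer accumulator variables (e.g. Adagrad slots) are not in the TRAINABLE_VARIABLES collection, so the regex in `vars_to_warm_start` can only ever match trainable variables, which is why the accumulator-related docstring examples are removed. The sketch below shows how `WarmStartSettings` is typically handed to a canned estimator through the `warm_start_from` argument; the checkpoint path, feature column, and network shape are illustrative assumptions, not part of this patch.

```
# Minimal sketch, TF 1.x estimator API; values below are hypothetical.
import tensorflow as tf

# Only variables in the TRAINABLE_VARIABLES collection whose names match this
# regex are warm-started; optimizer accumulator slots are not in that
# collection, so they are freshly initialized rather than loaded.
ws = tf.estimator.WarmStartSettings(
    ckpt_to_initialize_from="/tmp/model-1000",  # hypothetical checkpoint path
    vars_to_warm_start=".*input_layer.*")

# Illustrative embedding column, mirroring the docstring's input-layer example.
terms = tf.feature_column.categorical_column_with_hash_bucket(
    "terms", hash_bucket_size=1000)
feature_columns = [tf.feature_column.embedding_column(terms, dimension=16)]

estimator = tf.estimator.DNNClassifier(
    hidden_units=[128, 64],          # illustrative architecture
    feature_columns=feature_columns,
    warm_start_from=ws)
```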