Remove note about accumulator variables, since those are not added to TRAINABLE_VARIABLES.
author    A. Unique TensorFlower <gardener@tensorflow.org>
Thu, 8 Feb 2018 01:16:32 +0000 (17:16 -0800)
committer    TensorFlower Gardener <gardener@tensorflow.org>
Thu, 8 Feb 2018 01:20:18 +0000 (17:20 -0800)
PiperOrigin-RevId: 184922273
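
Background for the change above (not part of the commit): a minimal TF 1.x sketch of why the removed docstring example is moot. Adagrad accumulators are optimizer slot variables created with `trainable=False`, so they never appear in the `TRAINABLE_VARIABLES` collection that `_warm_start` iterates over. The variable name and toy loss below are illustrative only.

```
import tensorflow as tf

# Illustrative variable; the name mirrors the ".*input_layer.*" example
# in the docstring below.
w = tf.get_variable("input_layer/weights", shape=[2],
                    initializer=tf.zeros_initializer())
loss = tf.reduce_sum(w * w)

# DNN canned Estimators default to Adagrad; Adagrad creates one accumulator
# slot per weight, and slot variables are created with trainable=False.
train_op = tf.train.AdagradOptimizer(0.1).minimize(loss)

trainable = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
everything = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES)

# The Adagrad accumulator shows up only in GLOBAL_VARIABLES, so a regex like
# the removed "^(?!.*(Adagrad))" has nothing to exclude when _warm_start
# matches against TRAINABLE_VARIABLES.
print([v.name for v in trainable])
print([v.name for v in everything])
```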

tensorflow/python/estimator/warm_starting_util.py

index 48110ef..57db968 100644
@@ -117,21 +117,13 @@ class WarmStartSettings(
   ws = WarmStartSettings(ckpt_to_initialize_from="/tmp/model-1000")
   ```
 
-  Warm-start only the embeddings (input layer) and their accumulator variables:
+  Warm-start only the embeddings (input layer):
 
   ```
   ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
                          vars_to_warm_start=".*input_layer.*")
   ```
 
-  Warm-start everything except the optimizer accumulator variables
-  (DNN defaults to Adagrad):
-
-  ```
-  ws = WarmStartSettings(ckpt_to_initialize_from="/tmp",
-                         vars_to_warm_start="^(?!.*(Adagrad))")
-  ```
-
   Warm-start all weights but the embedding parameters corresponding to
   `sc_vocab_file` have a different vocab from the one used in the current
   model:
@@ -423,6 +415,8 @@ def _warm_start(warm_start_settings):
   # Both warm_start_settings.vars_to_warm_start = '.*' and
   # warm_start_settings.vars_to_warm_start = None will match everything here.
   for v in ops.get_collection(
+      # TODO(eddz): Allow for different collections here (to support
+      # warm-starting accumulators).
       ops.GraphKeys.TRAINABLE_VARIABLES,
       scope=warm_start_settings.vars_to_warm_start):
     if not isinstance(v, list):
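
For reference, a hedged usage sketch of the retained docstring example. It imports `WarmStartSettings` from the module path in this diff; the feature columns, hidden units, and checkpoint path are placeholders, and the `warm_start_from` constructor argument on canned Estimators is assumed to be available in this release.

```
import tensorflow as tf
from tensorflow.python.estimator import warm_starting_util as ws_util

# Warm-start only the input-layer (embedding) variables from a prior run.
ws = ws_util.WarmStartSettings(
    ckpt_to_initialize_from="/tmp/prev_model",
    vars_to_warm_start=".*input_layer.*")

# Assumed wiring: canned Estimators of this era accept `warm_start_from`.
estimator = tf.estimator.DNNClassifier(
    hidden_units=[64, 32],
    feature_columns=[tf.feature_column.numeric_column("age")],
    warm_start_from=ws)
```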