[layer] Support prefix for sharing weight names
author     Parichay Kapoor <pk.kapoor@samsung.com>
           Mon, 1 Nov 2021 01:13:16 +0000 (10:13 +0900)
committer  Jijoong Moon <jijoong.moon@samsung.com>
           Mon, 1 Nov 2021 23:27:03 +0000 (08:27 +0900)
This patch adds support for a prefix on weight names, so that layers
which should share their weights can do so by using the same prefix.

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
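
For context, a minimal standalone sketch of the naming scheme this patch
introduces (illustrative only, not nntrainer API; make_weight_name and the
layer/weight names are made up for the example): weights requested under the
same prefix resolve to the same fully qualified name, which is how sharing is
expressed, while the default prefix (the layer's own name) keeps weights
distinct.

#include <iostream>
#include <string>

// Mirrors the naming scheme in this patch: a weight's fully
// qualified name is "<prefix>:<weight name>", where the prefix
// falls back to the layer name when none is given.
std::string make_weight_name(const std::string &prefix,
                             const std::string &weight_name) {
  return prefix + ":" + weight_name;
}

int main() {
  // Two layers given the same prefix request the same weight name,
  // so both refer to the same weight by name.
  std::cout << make_weight_name("shared", "kernel") << '\n'; // shared:kernel
  std::cout << make_weight_name("shared", "kernel") << '\n'; // shared:kernel
  // A layer left with the default prefix (its own name) stays distinct.
  std::cout << make_weight_name("fc0", "kernel") << '\n';    // fc0:kernel
  return 0;
}

Because the prefix defaults to the layer name, layers that do not set a
prefix keep their existing weight names; only layers that opt into a shared
prefix are affected.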
nntrainer/layers/layer_context.h

index cbfcf39..e41ac80 100644
@@ -47,10 +47,13 @@ public:
     input_dim(dim),
     in_place(in_place_),
     num_outputs(num_out),
-    name(n) {
+    name(n),
+    prefix("") {
     NNTR_THROW_IF(!validate(), std::invalid_argument)
       << "Invalid init context name: " << name
       << " num inputs: " << getNumInputs();
+    if (prefix.empty())
+      prefix = name; // default prefix is the name
   }
 
   /**
@@ -143,7 +146,7 @@ public:
                              const WeightRegularizer reg, const float reg_const,
                              const std::string &name, bool trainable = true) {
     weights_spec.emplace_back(dim, init, reg, reg_const, trainable,
-                              getName() + ":" + name);
+                              prefix + ":" + name);
     return weights_spec.size() - 1;
   }
 
@@ -178,7 +181,7 @@ public:
                 const Tensor::Initializer init = Tensor::Initializer::NONE,
                 bool trainable = false,
                 TensorLifespan lifespan = TensorLifespan::ITERATION_LIFESPAN) {
-    tensors_spec.emplace_back(dim, init, trainable, getName() + ":" + name,
+    tensors_spec.emplace_back(dim, init, trainable, prefix + ":" + name,
                               lifespan);
     return tensors_spec.size() - 1;
   }
@@ -284,6 +287,7 @@ private:
 
   unsigned int num_outputs; /**< number of outputs for the layer */
   std::string name;         /**< name of the layer */
+  std::string prefix;       /**< prefix of the layer */
 };
 
 /**