[nn] Apply previous input realizer
author     Jihoon Lee <jhoon.it.lee@samsung.com>
           Thu, 18 Nov 2021 08:57:11 +0000 (17:57 +0900)
committer  Jijoong Moon <jijoong.moon@samsung.com>
           Mon, 29 Nov 2021 04:05:59 +0000 (13:05 +0900)
This patch applies the previous input realizer: the default input wiring is moved out of NetworkGraph::addDefaultInputLayers and into a PreviousInputRealizer step in the compile-time realizer chain.

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
api/ccapi/include/model.h
jni/Android.mk
nntrainer/graph/network_graph.cpp
nntrainer/graph/network_graph.h
nntrainer/models/neuralnet.cpp
test/unittest/compiler/unittest_interpreter.cpp
test/unittest/compiler/unittest_realizer.cpp

api/ccapi/include/model.h
index 01ba881..8e1d478 100644
@@ -167,6 +167,13 @@ public:
   * @note This method does not add the provided layers itself but adds a deep
   * copy of the passed layers to the model. The layers passed to this function
   * can be reused later.
+   * @note @a reference is a set of layers connected to each other, forming
+   * part of or a whole graph, which can be loaded into a model and run.
+   * More specifically, the reference must form an acyclic, directed graph
+   * in which every node has its incoming or outgoing connections defined,
+   * under the convention that a non-input layer with no explicit connection
+   * is connected to the previous layer. An 'input layer' is defined as a
+   * layer that has no incoming connection (it is either identified via
+   * @a start_layers or has its input shape specified explicitly).
    *
    * @param reference a group of layers being referred to.
    * @param type type of reference layers
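For illustration, a reference that satisfies the note above could be built as in the sketch below. It uses the LayerRepresentation/makeGraph helpers that appear in the unit-test diffs further down; the layer names are hypothetical:

  // "in" is the input layer: no incoming connection, input shape given explicitly.
  auto in = LayerRepresentation("fully_connected", {"name=in", "input_shape=1"});
  // "mid" declares no connection; under the previous-input convention it is
  // treated as connected to its predecessor "in".
  auto mid = LayerRepresentation("fully_connected", {"name=mid"});
  // "out" names its incoming connection explicitly.
  auto out = LayerRepresentation("fully_connected", {"name=out", "input_layers=mid"});
  // Directed, acyclic, and every node's connections are defined.
  auto reference = makeGraph({in, mid, out});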
jni/Android.mk
index 5b0ecea..3ba4206 100644
@@ -197,6 +197,7 @@ NNTRAINER_SRCS := $(NNTRAINER_ROOT)/nntrainer/models/neuralnet.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/compiler/ini_interpreter.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/compiler/flatten_realizer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/compiler/recurrent_realizer.cpp \
+                  $(NNTRAINER_ROOT)/nntrainer/compiler/previous_input_realizer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/compiler/remap_realizer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/compiler/slice_realizer.cpp \
                   $(NNTRAINER_ROOT)/nntrainer/compiler/input_realizer.cpp \
nntrainer/graph/network_graph.cpp
index f7f86f9..d3c9fe1 100644
@@ -96,19 +96,6 @@ void NetworkGraph::updateConnectionName(const std::string &from,
   }
 }
 
-void NetworkGraph::addDefaultInputLayers() {
-  for (auto iter = cbegin() + 1; iter != cend(); iter++) {
-    auto layer = *iter;
-    auto prev_layer = *(iter - 1);
-    if (layer->getNumInputConnections() == 0 &&
-        !layer->hasInputShapeProperty()) {
-      ml_logd("default input added %s->%s", prev_layer->getName().c_str(),
-              layer->getName().c_str());
-      layer->addInputLayers(prev_layer->getName());
-    }
-  }
-}
-
 void NetworkGraph::addLayerNode(std::unique_ptr<Layer> layer) {
   graph.addNode(std::make_unique<LayerNode>(std::move(layer)));
 }
@@ -333,8 +320,6 @@ void NetworkGraph::markNodesForBackwarding() {
 int NetworkGraph::realizeGraph() {
   int status = ML_ERROR_NONE;
 
-  addDefaultInputLayers();
-
   /**
    * invariant: the new realized nodes are added to the end,
    * otherwise this iteration becomes invalid. So, every iteration must be
nntrainer/graph/network_graph.h
index dbeacb9..556828e 100644
@@ -445,11 +445,6 @@ private:
   void setOutputLayers();
 
   /**
-   * @brief     set default input layer connections
-   */
-  void addDefaultInputLayers();
-
-  /**
    * @brief     Ensure that layer has a name.
    * @param[in] layer Layer whose name is to be ensured to be valid
    * @param[in] prefix Prefix to be attached to the layer name
nntrainer/models/neuralnet.cpp
index 4f02e35..6769175 100644
@@ -37,6 +37,7 @@
 #include <nntrainer_log.h>
 #include <node_exporter.h>
 #include <optimizer_context.h>
+#include <previous_input_realizer.h>
 #include <profiler.h>
 #include <recurrent_realizer.h>
 #include <remap_realizer.h>
@@ -103,6 +104,16 @@ int NeuralNetwork::compile() {
                             ? std::string()
                             : std::get<props::LossType>(model_props);
 
+  auto &input_layer_prop =
+    std::get<std::vector<props::InputLayer>>(model_props);
+  /// @note the label layer might need to be treated in a similar way as well
+
+  std::vector<std::string> input_layers = {};
+  if (!input_layer_prop.empty()) {
+    input_layers = std::vector<std::string>(input_layer_prop.begin(),
+                                            input_layer_prop.end());
+  }
+
   /// @todo make NetworkGraph compiled at the construction instead of having
   /// graph.compile(), neuralnetwork have ownership of list of layer nodes,
   /// which will be passed at compile time.
@@ -112,8 +123,14 @@ int NeuralNetwork::compile() {
     rep.push_back(*iter);
   }
 
-  FlattenRealizer fr;
-  rep = fr.realize(rep);
+  std::vector<std::unique_ptr<GraphRealizer>> realizers;
+
+  realizers.emplace_back(new PreviousInputRealizer(input_layers));
+  realizers.emplace_back(new FlattenRealizer());
+
+  for (auto &realizer : realizers) {
+    rep = realizer->realize(rep);
+  }
 
   model_graph = NetworkGraph();
   model_graph.setMemoryOptimizations(
@@ -704,7 +721,7 @@ int NeuralNetwork::train_run() {
     forwarding(false);
   };
 
-  auto update_eval_stat = [this, batch_size, &update_train_stat](
+  auto update_eval_stat = [batch_size, &update_train_stat](
                             RunStats &stat, const std::vector<Tensor> &outputs,
                             const std::vector<Tensor> &labels) {
     auto model_out = outputs[0].argmax();
@@ -934,6 +951,8 @@ void NeuralNetwork::addWithReferenceLayers(
   auto end_layers_ = normalize(end_layers);
 
   std::vector<std::unique_ptr<GraphRealizer>> realizers;
+
+  realizers.emplace_back(new PreviousInputRealizer(start_layers));
   realizers.emplace_back(new SliceRealizer(start_layers_, end_layers_));
 
   if (!input_layers_.empty()) {
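For reference, here is a minimal sketch of the behavior the new PreviousInputRealizer step is expected to provide, reconstructed from the removed NetworkGraph::addDefaultInputLayers above plus the identified-input list. The GraphRepresentation alias and the LayerNode accessors are assumptions mirroring the code in this diff; this is an illustration, not the actual previous_input_realizer.cpp:

  #include <algorithm>
  #include <memory>
  #include <string>
  #include <vector>

  // Assumed alias: the realizer pipeline passes a flat, ordered list of nodes.
  using GraphRepresentation = std::vector<std::shared_ptr<LayerNode>>;

  GraphRepresentation
  realizePreviousInput(const GraphRepresentation &reference,
                       const std::vector<std::string> &identified_inputs) {
    GraphRepresentation processed(reference);
    // Start at the second node: the first has no predecessor to fall back to.
    for (auto iter = processed.begin() + 1; iter != processed.end(); ++iter) {
      auto &node = *iter;
      // Nodes the user identified as inputs keep their missing connections.
      if (std::find(identified_inputs.begin(), identified_inputs.end(),
                    node->getName()) != identified_inputs.end()) {
        continue;
      }
      // Same condition as the removed addDefaultInputLayers: no incoming
      // connection and no explicit input shape means "connect to previous".
      if (node->getNumInputConnections() == 0 &&
          !node->hasInputShapeProperty()) {
        node->addInputLayers((*(iter - 1))->getName());
      }
    }
    return processed;
  }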
test/unittest/compiler/unittest_interpreter.cpp
index f303b7a..6ef4fe2 100644
@@ -134,8 +134,8 @@ TEST(nntrainerInterpreterTflite, simple_fc) {
                         "bias_initializer=ones", "weight_initializer=ones"});
 
   auto fc1_zeroed = LayerRepresentation(
-    "fully_connected",
-    {"name=fc1", "unit=2", "bias_initializer=ones", "weight_initializer=ones"});
+    "fully_connected", {"name=fc1", "unit=2", "bias_initializer=ones",
+                        "weight_initializer=ones", "input_layers=fc0"});
 
   auto g = makeGraph({fc0_zeroed, fc1_zeroed});
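(The explicit "input_layers=fc0" is added here presumably because the default input wiring no longer happens inside NetworkGraph::realizeGraph; a graph compiled without the realizer chain in NeuralNetwork::compile() must now spell out its connections.)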
 
test/unittest/compiler/unittest_realizer.cpp
index 9dd82d4..be65142 100644
@@ -266,6 +266,24 @@ TEST(PreviousInputRealizer, previous_p) {
     PreviousInputRealizer r({"fc1", "fc4"});
     realizeAndEqual(r, before, after);
   }
+  { /// first node is an intermediate node; the model input (fc2) comes later
+    std::vector<LayerRepresentation> before = {
+      {"fully_connected",
+       {"name=fc1", "input_layers=fc2"}},                 // intermediate node
+      {"fully_connected", {"name=fc2", "input_shape=1"}}, // model input
+      {"fully_connected", {"name=fc3"}}, // auto connected to fc3
+      {"fully_connected", {"name=fc4"}}, // auto connected to fc 3
+    };
+
+    std::vector<LayerRepresentation> after = {
+      {"fully_connected", {"name=fc1", "input_layers=fc2"}},
+      {"fully_connected", {"name=fc2", "input_shape=1"}},
+      {"fully_connected", {"name=fc3", "input_layers=fc2"}},
+      {"fully_connected", {"name=fc4", "input_layers=fc3"}},
+    };
+    PreviousInputRealizer r({});
+    realizeAndEqual(r, before, after);
+  }
 }
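Outside the realizeAndEqual helper, the same behavior could be exercised directly. A hypothetical snippet, assuming rep is a graph representation built like the before list above:

  PreviousInputRealizer r({});   // no user-identified inputs
  rep = r.realize(rep);          // fc3 and fc4 get wired to their predecessors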
 
 TEST(PreviousInputRealizer, user_not_identifying_first_input_n) {