[ TEST ] add more test cases for batch normalization realizer
author	jijoong.moon <jijoong.moon@samsung.com>
Wed, 20 Apr 2022 07:58:21 +0000 (16:58 +0900)
committer	Jijoong Moon <jijoong.moon@samsung.com>
Tue, 26 Apr 2022 09:49:07 +0000 (18:49 +0900)
This patch adds a test case for the batch normalization realizer with a
resnet-style basic block, which includes the multiout layer.

**Self evaluation:**
1. Build test:  [X]Passed [ ]Failed [ ]Skipped
2. Run test:  [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
nntrainer/compiler/bn_realizer.cpp
nntrainer/compiler/bn_realizer.h
test/include/nntrainer_test_util.h
test/nntrainer_test_util.cpp
test/unittest/compiler/unittest_realizer.cpp

index a3cbbe0..d92f700 100644 (file)
@@ -24,10 +24,6 @@ namespace nntrainer {
 
 static constexpr size_t SINGLE_INOUT_IDX = 0;
 
-BnRealizer::BnRealizer() {}
-
-BnRealizer::~BnRealizer() {}
-
 GraphRepresentation BnRealizer::realize(const GraphRepresentation &reference) {
   std::unordered_map<std::string, LayerNode *> existing_nodes;
   std::vector<LayerNode *> bn_layers;
index 916c98d..8c7a9a0 100644 (file)
@@ -35,18 +35,19 @@ public:
    * @brief Construct a new BN Realizer object
    *
    */
-  BnRealizer();
+  BnRealizer() = default;
 
   /**
    * @brief Destroy the Graph Realizer object
    *
    */
-  ~BnRealizer();
+  ~BnRealizer() = default;
 
   /**
    * @brief graph realizer creates a shallow copied graph based on the reference
    * @note bn realizer removes batch normalization layers from
    * GraphRepresentation
+   * @param reference GraphRepresentation to be realized
    * @throw std::invalid_argument if graph is ill formed
    *
    */
index d28a09f..f01d888 100644 (file)
@@ -36,6 +36,7 @@
 #include <neuralnet.h>
 #include <nntrainer_error.h>
 #include <nntrainer_log.h>
+#include <realizer.h>
 #include <tensor.h>
 
 /** tolerance is reduced for packaging, but CI runs at full tolerance */
@@ -211,10 +212,12 @@ makeGraph(const std::vector<LayerRepresentation> &layer_reps);
  * @brief make graph of a representation after compile
  *
  * @param layer_reps layer representation (pair of type, properties)
+ * @param realizers GraphRealizers to modify graph before compile
  * @return nntrainer::GraphRepresentation synthesized graph representation
  */
-nntrainer::GraphRepresentation
-makeGraph_V2(const std::vector<LayerRepresentation> &layer_reps);
+nntrainer::GraphRepresentation makeCompiledGraph(
+  const std::vector<LayerRepresentation> &layer_reps,
+  std::vector<std::unique_ptr<nntrainer::GraphRealizer>> &realizers);
 
 /**
  * @brief read tensor after reading tensor size
index 44f6ae1..069f55d 100644 (file)
@@ -26,6 +26,7 @@
 #include <climits>
 #include <iostream>
 #include <layer_node.h>
+#include <multiout_realizer.h>
 #include <nntrainer_error.h>
 #include <random>
 #include <regex>
@@ -218,27 +219,37 @@ makeGraph(const std::vector<LayerRepresentation> &layer_reps) {
   return graph_rep;
 }
 
-nntrainer::GraphRepresentation
-makeGraph_V2(const std::vector<LayerRepresentation> &layer_reps) {
+nntrainer::GraphRepresentation makeCompiledGraph(
+  const std::vector<LayerRepresentation> &layer_reps,
+  std::vector<std::unique_ptr<nntrainer::GraphRealizer>> &realizers) {
   static auto &ac = nntrainer::AppContext::Global();
 
   nntrainer::GraphRepresentation graph_rep;
   auto model_graph = nntrainer::NetworkGraph();
+
   for (auto &layer_representation : layer_reps) {
     std::shared_ptr<nntrainer::LayerNode> layer = nntrainer::createLayerNode(
       ac.createObject<nntrainer::Layer>(layer_representation.first),
       layer_representation.second);
+    graph_rep.push_back(layer);
+  }
+
+  for (auto &realizer : realizers) {
+    graph_rep = realizer->realize(graph_rep);
+  }
+
+  for (auto &layer : graph_rep) {
     model_graph.addLayer(layer);
   }
-  // compile with loss
+
+  // Compile with loss
   model_graph.compile("mse");
 
+  graph_rep.clear();
   for (auto &node : model_graph.getLayerNodes()) {
     graph_rep.push_back(node);
   }
 
-  // remove loss layer
-  graph_rep.pop_back();
   return graph_rep;
 }
 
index 768ed0b..75aa65a 100644 (file)
@@ -45,12 +45,15 @@ static void realizeAndEqual(GraphRealizer &realizer,
   graphEqual(processed, expected_graph);
 }
 
-static void
-compileAndRealizeAndEqual(GraphRealizer &realizer,
-                          const std::vector<LayerRepresentation> &input,
-                          const std::vector<LayerRepresentation> &expected) {
-  auto processed = realizer.realize(makeGraph_V2(input));
-  auto expected_graph = makeGraph(expected);
+static void compileAndRealizeAndEqual(
+  GraphRealizer &realizer,
+  std::vector<std::unique_ptr<GraphRealizer>> &realizers,
+  const std::vector<LayerRepresentation> &input,
+  const std::vector<LayerRepresentation> &expected) {
+  auto processed = realizer.realize(makeCompiledGraph(input, realizers));
+  std::vector<std::unique_ptr<nntrainer::GraphRealizer>> defalute_realizers;
+  auto expected_graph = makeCompiledGraph(expected, realizers);
+
   graphEqual(processed, expected_graph);
 }
 
@@ -779,34 +782,55 @@ TEST(BnRealizer, bn_realizer_p) {
   /// realization without identifying custom input
   std::vector<LayerRepresentation> before = {
     {"fully_connected", {"name=fc1"}},
-    {"batch_normalization",
-     {"name=bn1", "input_layers=fc1"}}, // auto connected to fc 1
-    {"activation",
-     {"name=ac1", "activation=relu",
-      "input_layers=bn1"}}, // auto connected to bn 1
-    {"fully_connected",
-     {"name=fc2", "input_layers=ac1"}}, // auto connected to ac 1
-    {"batch_normalization",
-     {"name=bn2", "input_layers=fc2"}}, // auto connected to fc 2
-    {"activation",
-     {"name=ac2", "activation=relu",
-      "input_layers=bn2"}}, // auto connected to fc 2
-    {"fully_connected",
-     {"name=fc3", "input_layers=ac2"}}, // auto connected to ac 2
+    {"batch_normalization", {"name=bn1", "input_layers=fc1"}},
+    {"activation", {"name=ac1", "activation=relu", "input_layers=bn1"}},
+    {"fully_connected", {"name=fc2", "input_layers=ac1"}},
+    {"batch_normalization", {"name=bn2", "input_layers=fc2"}},
+    {"activation", {"name=ac2", "activation=relu", "input_layers=bn2"}},
+    {"fully_connected", {"name=fc3", "input_layers=ac2"}},
   };
   std::vector<LayerRepresentation> after = {
     {"fully_connected", {"name=fc1"}},
-    {"activation",
-     {"name=ac1", "activation=relu",
-      "input_layers=fc1"}}, // auto connected to fc 1
+    {"activation", {"name=ac1", "activation=relu", "input_layers=fc1"}},
     {"fully_connected", {"name=fc2", "input_layers=ac1"}},
-    {"activation",
-     {"name=ac2", "activation=relu",
-      "input_layers=fc2"}}, // auto connected to fc 1
-    {"fully_connected",
-     {"name=fc3", "input_layers=ac2"}}, // auto connected to fc 3
+    {"activation", {"name=ac2", "activation=relu", "input_layers=fc2"}},
+    {"fully_connected", {"name=fc3", "input_layers=ac2"}},
   };
-  BnRealizer r({});
-  compileAndRealizeAndEqual(r, before, after);
+  BnRealizer r;
+  std::vector<std::unique_ptr<nntrainer::GraphRealizer>> realizers;
+  compileAndRealizeAndEqual(r, realizers, before, after);
+}
 
+TEST(BnRealizer, bn_realizer_resblock_p) {
+  std::vector<LayerRepresentation> before = {
+    {"input", {"name=input0"}},
+    {"conv2d", {"name=conv0", "kernel_size=3,3", "input_layers=input0"}},
+    {"batch_normalization", {"name=first_bn", "input_layers=conv0"}},
+    {"activation", {"name=ac0", "activation=relu", "input_layers=first_bn"}},
+    {"conv2d", {"name=a1", "kernel_size=3,3", "input_layers=ac0"}},
+    {"batch_normalization", {"name=bn1", "input_layers=a1"}},
+    {"activation", {"name=ac1", "activation=relu", "input_layers=bn1"}},
+    {"conv2d", {"name=a2", "kernel_size=3,3", "input_layers=ac1"}},
+    {"conv2d", {"name=b1", "kernel_size=3,3", "input_layers=ac0"}},
+    {"addition", {"name=c1", "input_layers=a2,b1"}},
+    {"batch_normalization", {"name=bn2", "input_layers=c1"}},
+    {"activation", {"name=ac2", "activation=relu", "input_layers=bn2"}},
+    {"fully_connected", {"name=fc3", "input_layers=ac2"}},
+  };
+  std::vector<LayerRepresentation> after = {
+    {"input", {"name=input0"}},
+    {"conv2d", {"name=conv0", "kernel_size=3,3", "input_layers=input0"}},
+    {"activation", {"name=ac0", "activation=relu", "input_layers=conv0"}},
+    {"conv2d", {"name=a1", "kernel_size=3,3", "input_layers=ac0"}},
+    {"activation", {"name=ac1", "activation=relu", "input_layers=a1"}},
+    {"conv2d", {"name=a2", "kernel_size=3,3", "input_layers=ac1"}},
+    {"conv2d", {"name=b1", "kernel_size=3,3", "input_layers=ac0"}},
+    {"addition", {"name=c1", "input_layers=a2,b1"}},
+    {"activation", {"name=ac2", "activation=relu", "input_layers=c1"}},
+    {"fully_connected", {"name=fc3", "input_layers=ac2"}},
+  };
+  std::vector<std::unique_ptr<nntrainer::GraphRealizer>> realizers;
+  realizers.emplace_back(new nntrainer::MultioutRealizer());
+  BnRealizer r;
+  compileAndRealizeAndEqual(r, realizers, before, after);
 }