[Test] Add multiout tests
authorJihoon Lee <jhoon.it.lee@samsung.com>
Wed, 24 Nov 2021 10:58:34 +0000 (19:58 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Fri, 3 Dec 2021 06:54:34 +0000 (15:54 +0900)
This patch adds multiout tests

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
nntrainer/compiler/previous_input_realizer.cpp
nntrainer/layers/layer_node.cpp
packaging/unittest_models_multiout.tar.gz [new file with mode: 0644]
test/input_gen/genModelsMultiout_v2.py [new file with mode: 0644]
test/unittest/meson.build
test/unittest/models/models_golden_test.h
test/unittest/models/unittest_models_multiout.cpp

index 19d40dd..56f57f6 100644 (file)
@@ -9,13 +9,13 @@
  * @author Jihoon Lee <jhoon.it.lee@samsung.com>
  * @bug No known bugs except for NYI items
  */
-#include "connection.h"
 #include <algorithm>
 #include <compiler_fwd.h>
 #include <memory>
 #include <stdexcept>
 #include <vector>
 
+#include <connection.h>
 #include <layer_node.h>
 #include <nntrainer_log.h>
 #include <previous_input_realizer.h>
index e2821be..9e4fd91 100644 (file)
@@ -20,6 +20,7 @@
 #include <app_context.h>
 #include <base_properties.h>
 #include <common_properties.h>
+#include <connection.h>
 #include <layer_node.h>
 #include <nntrainer_error.h>
 #include <nntrainer_log.h>
@@ -253,16 +254,26 @@ std::ostream &operator<<(std::ostream &out, const LayerNode &l) {
     std::get<std::vector<props::InputConnection>>(*l.layer_node_props);
 
   out << "[" << l.getName() << '/' << l.getType() << "]\n";
-  auto print_vector = [&out](const auto &layers, const std::string &title) {
-    out << title << "[" << layers.size() << "] ";
-    for (auto &layer : layers) {
-      out << to_string(layer) << ' ';
+  auto print_vector = [&out](const auto &cons, const std::string &title) {
+    out << title << "[" << cons.size() << "] ";
+    for (auto &con : cons) {
+      out << con.toString() << ' ';
     }
     out << '\n';
   };
 
-  print_vector(input_connections, " input_connections");
-  //   print_vector(l.output_connections, "output_connections");
+  auto print_vector_2 = [&out](const auto &cons, const std::string &title) {
+    out << title << "[" << cons.size() << "] ";
+    for (auto &con : cons) {
+      out << con->toString() << ' ';
+    }
+    out << '\n';
+  };
+
+  print_vector(
+    std::vector<Connection>(input_connections.begin(), input_connections.end()),
+    " input_connections");
+  print_vector_2(l.output_connections, "output_connections");
   return out;
 }
 
diff --git a/packaging/unittest_models_multiout.tar.gz b/packaging/unittest_models_multiout.tar.gz
new file mode 100644 (file)
index 0000000..14c42f0
Binary files /dev/null and b/packaging/unittest_models_multiout.tar.gz differ
diff --git a/test/input_gen/genModelsMultiout_v2.py b/test/input_gen/genModelsMultiout_v2.py
new file mode 100644 (file)
index 0000000..aa3a8ea
--- /dev/null
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: Apache-2.0
+##
+# Copyright (C) 2021 Jihoon Lee <jhoon.it.lee@samsung.com>
+#
+# @file genModelsMultiout_v2.py
+# @date 24 November 2021
+# @brief Generate multiout model tcs
+# @author Jihoon lee <jhoon.it.lee@samsung.com>
+
+from recorder_v2 import record_v2, inspect_file
+import torch
+
+
+class SplitAndJoin(torch.nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.fc = torch.nn.Linear(3, 2)
+        self.fc1 = torch.nn.Linear(1, 3)
+        self.fc2 = torch.nn.Linear(1, 3)
+        self.loss = torch.nn.MSELoss()
+
+    def forward(self, inputs, labels):
+        out = self.fc(inputs[0])
+        a0, a1 = torch.split(out, 1, dim=1)
+        out = self.fc1(a0) + self.fc2(a1)
+        loss = self.loss(out, labels[0])
+        return out, loss
+
+
+class OneToOne(torch.nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.fc = torch.nn.Linear(3, 2)
+        self.loss = torch.nn.MSELoss()
+
+    def forward(self, inputs, labels):
+        out = self.fc(inputs[0])
+        a0, a1 = torch.split(out, 1, dim=1)
+        out = a0 + a1
+        loss = self.loss(out, labels[0])
+        return out, loss
+
+class OneToMany(torch.nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.fc = torch.nn.Linear(2, 3)
+        self.loss = torch.nn.MSELoss()
+
+    def forward(self, inputs, labels):
+        out = self.fc(inputs[0])
+        a0, a1, a2 = torch.split(out, 1, dim=1)
+        b0 = a0 + a1
+        c0 = a0 + a1
+        d0 = b0 + c0 + a2
+        loss = self.loss(d0, labels[0])
+        return d0, loss
+
+if __name__ == "__main__":
+    record_v2(
+        SplitAndJoin(),
+        iteration=2,
+        input_dims=[(5, 3)],
+        label_dims=[(5, 3)],
+        name="split_and_join"
+    )
+
+    record_v2(
+        OneToOne(),
+        iteration=2,
+        input_dims=[(5, 3)],
+        label_dims=[(5, 1)],
+        name="one_to_one"
+    )
+
+    record_v2(
+        OneToMany(),
+        iteration=2,
+        input_dims=[(5, 2)],
+        label_dims=[(5, 1)],
+        name="one_to_many"
+    )
+
+    inspect_file("one_to_many.nnmodelgolden")
index 3642286..389f5c1 100644 (file)
@@ -12,6 +12,7 @@ unzip_target = [
   ['unittest_layers_v2.tar.gz', 'unittest_layers'],
   ['unittest_models.tar.gz', 'unittest_models'],
   ['unittest_models_v2.tar.gz', 'unittest_models'],
+  ['unittest_models_multiout.tar.gz', 'unittest_models'],
 ]
 
 src_path = meson.source_root() / 'packaging'
index 937156d..33870c9 100644 (file)
@@ -40,7 +40,9 @@ typedef enum {
   COMPARE_V2 = COMPARE | USE_V2,                 /**< compare v2 */
   NO_THROW_RUN_V2 = NO_THROW_RUN | USE_V2,       /**< no throw run with v2 */
   SAVE_AND_LOAD_V2 = SAVE_AND_LOAD_INI | USE_V2, /**< save and load with v2 */
-  ALL = COMPARE | SAVE_AND_LOAD_INI              /**< Set every option */
+
+  ALL = COMPARE | SAVE_AND_LOAD_INI, /**< Set every option */
+  ALL_V2 = ALL | USE_V2              /**< Set every option with v2 */
 } ModelTestOption;
 
 using ModelGoldenTestParamType =
index d9d9b2e..db1452f 100644 (file)
@@ -34,15 +34,15 @@ using namespace nntrainer;
 ///    D
 static std::unique_ptr<NeuralNetwork> split_and_join() {
   std::unique_ptr<NeuralNetwork> nn(new NeuralNetwork());
-  nn->setProperty({"batch_size=1"});
+  nn->setProperty({"batch_size=5"});
 
   auto graph = makeGraph({
-    {"fully_connected", {"name=fc", "input_shape=1:1:2", "unit=2"}},
+    {"fully_connected", {"name=fc", "input_shape=1:1:3", "unit=2"}},
     {"split", {"name=a", "input_layers=fc", "axis=3"}},
-    {"fully_connected", {"name=b", "input_layers=a(0)", "unit=2"}},
-    {"fully_connected", {"name=c", "input_layers=a(1)", "unit=2"}},
+    {"fully_connected", {"name=c", "input_layers=a(1)", "unit=3"}},
+    {"fully_connected", {"name=b", "input_layers=a(0)", "unit=3"}},
     {"addition", {"name=d", "input_layers=b,c"}},
-    {"constant_derivative", {"name=loss", "input_layers=d"}},
+    {"mse", {"name=loss", "input_layers=d"}},
   });
   for (auto &node : graph) {
     nn->addLayer(node);
@@ -59,15 +59,15 @@ static std::unique_ptr<NeuralNetwork> split_and_join() {
 ///  v    v
 /// (a0, a1)
 ///    B
-[[maybe_unused]] static std::unique_ptr<NeuralNetwork> one_to_one() {
+static std::unique_ptr<NeuralNetwork> one_to_one() {
   std::unique_ptr<NeuralNetwork> nn(new NeuralNetwork());
-  nn->setProperty({"batch_size=1"});
+  nn->setProperty({"batch_size=5"});
 
   auto graph = makeGraph({
-    {"fully_connected", {"name=fc", "input_shape=1:1:2", "unit=2"}},
+    {"fully_connected", {"name=fc", "input_shape=1:1:3", "unit=2"}},
     {"split", {"name=a", "input_layers=fc", "axis=3"}},
     {"addition", {"name=b", "input_layers=a(0),a(1)"}},
-    {"constant_derivative", {"name=loss", "input_layers=b"}},
+    {"mse", {"name=loss", "input_layers=b"}},
   });
   for (auto &node : graph) {
     nn->addLayer(node);
@@ -84,15 +84,15 @@ static std::unique_ptr<NeuralNetwork> split_and_join() {
 ///  v    v
 /// (a0, a1)
 ///    B
-[[maybe_unused]] static std::unique_ptr<NeuralNetwork> one_to_one_reversed() {
+static std::unique_ptr<NeuralNetwork> one_to_one_reversed() {
   std::unique_ptr<NeuralNetwork> nn(new NeuralNetwork());
-  nn->setProperty({"batch_size=1"});
+  nn->setProperty({"batch_size=5"});
 
   auto graph = makeGraph({
-    {"fully_connected", {"name=fc", "input_shape=1:1:2", "unit=2"}},
+    {"fully_connected", {"name=fc", "input_shape=1:1:3", "unit=2"}},
     {"split", {"name=a", "input_layers=fc", "axis=3"}},
     {"addition", {"name=b", "input_layers=a(1),a(0)"}},
-    {"constant_derivative", {"name=loss", "input_layers=b"}},
+    {"mse", {"name=loss", "input_layers=b"}},
   });
   for (auto &node : graph) {
     nn->addLayer(node);
@@ -104,30 +104,31 @@ static std::unique_ptr<NeuralNetwork> split_and_join() {
 
 /// A has two output tensor a1, a2 and B and C takes it
 ///     A
-/// (a0, a1)
-///  | \  |-------
-///  |  - + - \   |
-///  v    v   v   v
-/// (a0, a1) (a0, a1)
-///    B         C
-///   (b0)      (c0)
-///     \        /
-///      \      /
+/// (a0, a1, a2)---------------->(a2)
+///  | \  |-------                E
+///  |  - + - \   |              /
+///  v    v   v   v             /
+/// (a0, a1) (a0, a1)          /
+///    B         C            /
+///   (b0)      (c0)         /
+///     \        /          /
+///      \      /----------
 ///         v
 ///        (d0)
 ///          D
-[[maybe_unused]] static std::unique_ptr<NeuralNetwork> one_to_many() {
+static std::unique_ptr<NeuralNetwork> one_to_many() {
   std::unique_ptr<NeuralNetwork> nn(new NeuralNetwork());
-  nn->setProperty({"batch_size=1"});
+  nn->setProperty({"batch_size=5"});
 
   auto graph = makeGraph({
-    {"fully_connected", {"name=fc", "input_shape=1:1:2", "unit=2"}},
+    {"fully_connected", {"name=fc", "input_shape=1:1:2", "unit=3"}},
     {"split", {"name=a", "input_layers=fc", "axis=3"}},
     {"addition", {"name=b", "input_layers=a(0),a(1)"}},
     {"addition", {"name=c", "input_layers=a(0),a(1)"}},
-    {"addition", {"name=d", "input_layers=b,c"}},
-    {"constant_derivative", {"name=loss", "input_layers=d"}},
+    {"addition", {"name=d", "input_layers=b,c,a(2)"}},
+    {"mse", {"name=loss", "input_layers=d"}},
   });
+
   for (auto &node : graph) {
     nn->addLayer(node);
   }
@@ -139,8 +140,11 @@ static std::unique_ptr<NeuralNetwork> split_and_join() {
 INSTANTIATE_TEST_CASE_P(
   multiInoutModels, nntrainerModelTest,
   ::testing::ValuesIn({
-    mkModelTc_V2(split_and_join, "split_and_join",
-                 ModelTestOption::SAVE_AND_LOAD_V2),
+    mkModelTc_V2(split_and_join, "split_and_join", ModelTestOption::ALL_V2),
+    mkModelTc_V2(one_to_one, "one_to_one", ModelTestOption::ALL_V2),
+    mkModelTc_V2(one_to_one_reversed, "one_to_one__reversed",
+                 ModelTestOption::ALL_V2),
+    mkModelTc_V2(one_to_many, "one_to_many", ModelTestOption::ALL_V2),
   }),
   [](const testing::TestParamInfo<nntrainerModelTest::ParamType> &info) {
     return std::get<1>(info.param);