[Recurrent] Support connection for as_sequence
author: Jihoon Lee <jhoon.it.lee@samsung.com>
Fri, 17 Dec 2021 09:07:31 +0000 (18:07 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Wed, 29 Dec 2021 06:20:00 +0000 (15:20 +0900)
This patch enables the recurrent realizer to consume connections
specified in the as_sequence parameter

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
nntrainer/compiler/recurrent_realizer.cpp
nntrainer/compiler/recurrent_realizer.h
test/unittest/compiler/unittest_realizer.cpp
test/unittest/unittest_nntrainer_models.cpp

index a0af694..cf9c116 100644 (file)
  * @bug No known bugs except for NYI items
  */
 #include <algorithm>
+#include <iterator>
 #include <stdexcept>
+#include <string>
 
+#include <base_properties.h>
 #include <common_properties.h>
 #include <connection.h>
 #include <grucell.h>
@@ -101,18 +104,36 @@ RecurrentRealizer::RecurrentRealizer(const std::vector<std::string> &properties,
                                      const std::vector<Connection> &input_conns,
                                      const std::vector<Connection> &end_conns) :
   input_layers(),
-  end_conns(end_conns),
+  end_info(),
   sequenced_return_conns(),
   recurrent_props(new PropTypes(
     std::vector<props::RecurrentInput>(), std::vector<props::RecurrentOutput>(),
     std::vector<props::AsSequence>(), props::UnrollFor(1))) {
   auto left = loadProperties(properties, *recurrent_props);
 
-  /// @todo support AsSequence with index with identity layer
   std::transform(input_conns.begin(), input_conns.end(),
                  std::inserter(this->input_layers, this->input_layers.begin()),
                  [](const Connection &c) { return c.getName(); });
 
+  /// build end info.
+  /// eg)
+  /// end_layers: a(0), a(3), b(0) becomes
+  /// end_info: {{a, 3}, {b, 0}}
+  /// end_layers: a(1), b(3), c(0) becomes
+  /// end_info: {{a, 1}, {b, 3}, {c, 0}}
+  for (unsigned i = 0u, sz = end_conns.size(); i < sz; ++i) {
+    const auto &name = end_conns[i].getName();
+    const auto &idx = end_conns[i].getIndex();
+    auto iter =
+      std::find_if(end_info.begin(), end_info.end(),
+                   [&name](auto &info) { return info.first == name; });
+    if (iter == end_info.end()) {
+      end_info.emplace_back(name, idx);
+    } else {
+      iter->second = std::max(iter->second, idx);
+    }
+  }
+
   auto &[inputs, outputs, as_sequence, unroll_for] = *recurrent_props;
 
   NNTR_THROW_IF(inputs.empty() || inputs.size() != outputs.size(),
@@ -276,28 +297,13 @@ RecurrentRealizer::realize(const GraphRepresentation &reference) {
   };
 
   /**
-   * @brief case when return sequence is not true, only last output is renamed
-   * @todo support connection using node->remapConnection
-   */
-  auto naive_output = [](const GraphRepresentation &reference_,
-                         const Connection &con, unsigned unroll_for) {
-    auto target = con.getName() + "/" + std::to_string(unroll_for - 1);
-    RemapRealizer r([target, con](std::string &name) {
-      if (name == target) {
-        name = con.getName();
-      }
-    });
-
-    return r.realize(reference_);
-  };
-
-  /**
    * @brief case when return sequence is true, concat layer is added to
    * aggregate all the output
    *
    */
-  auto concat_output = [this](const GraphRepresentation &reference_,
-                              const Connection &con, unsigned unroll_for) {
+  auto concat_output = [](const GraphRepresentation &reference_,
+                          const Connection &con, unsigned unroll_for,
+                          const std::string &new_layer_name) {
     GraphRepresentation processed(reference_.begin(), reference_.end());
 
     std::vector<props::RecurrentInput> conns;
@@ -310,30 +316,61 @@ RecurrentRealizer::realize(const GraphRepresentation &reference) {
     /// @todo have axis in concat layer
     /// @todo this has to be wrapped with identity layer as #1793
     auto node = createLayerNode(
-      "concat", {"name=" + con.getName(), "input_layers=" + to_string(conns)});
+      "concat", {"name=" + new_layer_name, "input_layers=" + to_string(conns)});
     processed.push_back(std::move(node));
 
     return processed;
   };
 
   /**
-   * @brief set output name
+   * @brief create identity layer with output name by either creating concat
+   * layer or directly using the connection, the number of inputs connection
+   * have is depending on the end_conns max.
+   *
+   * eg)
+   * layer A outputs a, b, c, d
+   *
+   * if end_layers=A(0),A(2)
+   *    as_sequence=A(0)
+   * realizer cannot know there is d so this is ignored. It is okay because user
+   * didn't specify to use it anyway
+   *
+   * [A]
+   * type=identity
+   * input_layers=A_concat_0, A(1), A(2)
    *
    */
-  auto step3_connect_output =
-    [this, naive_output, concat_output](const GraphRepresentation &reference_,
-                                        unsigned unroll_for) {
-      /// @note below is inefficient way of processing nodes. consider optimize
-      /// below as needed by calling remap realizer only once
-      auto processed = reference_;
-      for (auto &conn : end_conns) {
-        processed = sequenced_return_conns.count(conn)
-                      ? concat_output(processed, conn, unroll_for)
-                      : naive_output(processed, conn, unroll_for);
+  auto step3_connect_output = [this, concat_output](
+                                const GraphRepresentation &reference_,
+                                unsigned unroll_for) {
+    /// @note below is inefficient way of processing nodes. consider optimize
+    /// below as needed by calling remap realizer only once
+    auto processed = reference_;
+    for (auto [name, max_idx] : end_info) {
+
+      std::vector<props::InputConnection> out_node_inputs;
+
+      for (auto i = 0u; i <= max_idx; ++i) {
+
+        if (auto con = Connection(name, i); sequenced_return_conns.count(con)) {
+          auto concat_name = name + "/concat_" + std::to_string(i);
+          processed = concat_output(processed, con, unroll_for, concat_name);
+          // create concat connection name,
+          out_node_inputs.emplace_back(Connection(concat_name, 0));
+        } else {
+          auto last_layer_name = name + "/" + std::to_string(unroll_for - 1);
+          out_node_inputs.emplace_back(Connection(last_layer_name, i));
+        }
       }
 
-      return processed;
-    };
+      auto alias_layer = createLayerNode(
+        "identity",
+        {"name=" + name, "input_layers=" + to_string(out_node_inputs)});
+      processed.push_back(std::move(alias_layer));
+    }
+
+    return processed;
+  };
 
   auto unroll_for = std::get<props::UnrollFor>(*recurrent_props).get();
   step0_verify_and_prepare();
index de84efc..2974fd7 100644 (file)
@@ -20,6 +20,7 @@
 #include <tuple>
 #include <unordered_map>
 #include <unordered_set>
+#include <utility>
 #include <vector>
 
 #include <connection.h>
@@ -91,7 +92,9 @@ private:
                std::vector<props::AsSequence>, props::UnrollFor>;
 
   std::unordered_set<std::string> input_layers; /**< external input layers */
-  std::vector<Connection> end_conns;            /**< final output layers id */
+  std::vector<std::pair<std::string /**< connection name*/,
+                        unsigned /**< max idx requested */>>
+    end_info; /**< final end layers id */
   std::unordered_set<Connection>
     sequenced_return_conns; /**< sequenced return conns, subset of end_conns
                              */
index 4e31e3c..90e08c7 100644 (file)
@@ -69,16 +69,24 @@ TEST(RecurrentRealizer, recurrent_no_return_sequence_p) {
     {"fully_connected", {"name=fc_out", "input_layers=fc_in"}}};
 
   std::vector<LayerRepresentation> expected = {
+    /// t - 0
     {"fully_connected", {"name=fc_in/0", "input_layers=source"}},
     {"fully_connected", {"name=fc_out/0", "input_layers=fc_in/0"}},
+
+    /// t - 1
     {"fully_connected",
      {"name=fc_in/1", "input_layers=fc_out/0", "shared_from=fc_in/0"}},
     {"fully_connected",
      {"name=fc_out/1", "input_layers=fc_in/1", "shared_from=fc_out/0"}},
+
+    /// t - 2
     {"fully_connected",
      {"name=fc_in/2", "input_layers=fc_out/1", "shared_from=fc_in/0"}},
     {"fully_connected",
-     {"name=fc_out", "input_layers=fc_in/2", "shared_from=fc_out/0"}},
+     {"name=fc_out/2", "input_layers=fc_in/2", "shared_from=fc_out/0"}},
+
+    /// mapping
+    {"identity", {"name=fc_out", "input_layers=fc_out/2"}},
   };
 
   realizeAndEqual(r, before, expected);
@@ -95,103 +103,29 @@ TEST(RecurrentRealizer, recurrent_return_sequence_single_p) {
     {"fully_connected", {"name=fc_out", "input_layers=lstm"}}};
 
   std::vector<LayerRepresentation> expected = {
+    /// t - 0
     {"lstm",
      {"name=lstm/0", "input_layers=source", "max_timestep=3", "timestep=0"}},
     {"fully_connected", {"name=fc_out/0", "input_layers=lstm/0"}},
+
+    /// t - 1
     {"lstm",
      {"name=lstm/1", "input_layers=fc_out/0", "shared_from=lstm/0",
       "max_timestep=3", "timestep=1"}},
     {"fully_connected",
      {"name=fc_out/1", "input_layers=lstm/1", "shared_from=fc_out/0"}},
+
+    /// t - 2
     {"lstm",
      {"name=lstm/2", "input_layers=fc_out/1", "shared_from=lstm/0",
       "max_timestep=3", "timestep=2"}},
     {"fully_connected",
      {"name=fc_out/2", "input_layers=lstm/2", "shared_from=fc_out/0"}},
-    {"concat", {"name=fc_out", "input_layers=fc_out/0,fc_out/1,fc_out/2"}},
-  };
-
-  realizeAndEqual(r, before, expected);
-}
-
-TEST(RecurrentRealizer, recurrent_multi_inout_p) {
-  using C = Connection;
-  RecurrentRealizer r(
-    {
-      "unroll_for=3",
-      "recurrent_input=lstm,source3_dummy",
-      "recurrent_output=fc_out,output_dummy",
-    },
-    {C("source"), C("source2"), C("source3")}, {C("fc_out")});
 
-  /// @note for below graph,
-  /// 1. fc_out feds back to lstm
-  /// 2. ouput_dummy feds back to source2_dummy
-  /// ========================================================
-  /// lstm        -------- addition - split ---- fc_out
-  /// source2_dummy   --/                  \-----output_dummy
-  /// source3_dummy    /
-  std::vector<LayerRepresentation> before = {
-    {"lstm", {"name=lstm", "input_layers=source"}},
-    {"concat", {"name=source2_dummy", "input_layers=source2"}},
-    {"concat", {"name=source3_dummy", "input_layers=source3"}},
-    {"addition", {"name=add", "input_layers=lstm,source2_dummy,source3_dummy"}},
-    {"split", {"name=split", "input_layers=add"}},
-    {"concat", {"name=output_dummy", "input_layers=split(1)"}},
-    {"fully_connected", {"name=fc_out", "input_layers=split(0)"}},
-  };
-
-  std::vector<LayerRepresentation> expected = {
-    /// timestep 0
-    {"lstm",
-     {"name=lstm/0", "input_layers=source", "max_timestep=3", "timestep=0"}},
-    {"concat", {"name=source2_dummy/0", "input_layers=source2"}},
-    {"concat", {"name=source3_dummy/0", "input_layers=source3"}},
-    {"addition",
-     {"name=add/0", "input_layers=lstm/0,source2_dummy/0,source3_dummy/0"}},
-    {"split", {"name=split/0", "input_layers=add/0"}},
-    {"concat", {"name=output_dummy/0", "input_layers=split/0(1)"}},
-    {"fully_connected", {"name=fc_out/0", "input_layers=split/0(0)"}},
-
-    /// timestep 1
-    {"lstm",
-     {"name=lstm/1", "input_layers=fc_out/0", "shared_from=lstm/0",
-      "max_timestep=3", "timestep=1"}},
-    {"concat",
-     {"name=source2_dummy/1", "shared_from=source2_dummy/0",
-      "input_layers=source2"}},
-    {"concat",
-     {"name=source3_dummy/1", "shared_from=source3_dummy/0",
-      "input_layers=output_dummy/0"}},
-    {"addition",
-     {"name=add/1", "input_layers=lstm/1,source2_dummy/1,source3_dummy/1",
-      "shared_from=add/0"}},
-    {"split", {"name=split/1", "input_layers=add/1", "shared_from=split/0"}},
+    /// mapping
     {"concat",
-     {"name=output_dummy/1", "input_layers=split/1(1)",
-      "shared_from=output_dummy/0"}},
-    {"fully_connected",
-     {"name=fc_out/1", "input_layers=split/1(0)", "shared_from=fc_out/0"}},
-
-    /// timestep 2
-    {"lstm",
-     {"name=lstm/2", "input_layers=fc_out/1", "shared_from=lstm/0",
-      "max_timestep=3", "timestep=2"}},
-    {"concat",
-     {"name=source2_dummy/2", "shared_from=source2_dummy/0",
-      "input_layers=source2"}},
-    {"concat",
-     {"name=source3_dummy/2", "shared_from=source3_dummy/0",
-      "input_layers=output_dummy/1"}},
-    {"addition",
-     {"name=add/2", "input_layers=lstm/2,source2_dummy/2,source3_dummy/2",
-      "shared_from=add/0"}},
-    {"split", {"name=split/2", "input_layers=add/2", "shared_from=split/0"}},
-    {"concat",
-     {"name=output_dummy/2", "input_layers=split/2(1)",
-      "shared_from=output_dummy/0"}},
-    {"fully_connected",
-     {"name=fc_out", "input_layers=split/2(0)", "shared_from=fc_out/0"}},
+     {"name=fc_out/concat_0", "input_layers=fc_out/0,fc_out/1,fc_out/2"}},
+    {"identity", {"name=fc_out", "input_layers=fc_out/concat_0"}},
   };
 
   realizeAndEqual(r, before, expected);
@@ -202,9 +136,9 @@ TEST(RecurrentRealizer, recurrent_multi_inout_return_seq_p) {
   RecurrentRealizer r(
     {
       "unroll_for=3",
-      "recurrent_input=lstm,source3_dummy",
       "as_sequence=fc_out",
-      "recurrent_output=fc_out,output_dummy",
+      "recurrent_input=lstm,add(2)",
+      "recurrent_output=fc_out,split(1)",
     },
     {C("source"), C("source2"), C("source3")}, {C("fc_out")});
 
@@ -212,16 +146,12 @@ TEST(RecurrentRealizer, recurrent_multi_inout_return_seq_p) {
   /// 1. fc_out feds back to lstm
   /// 2. ouput_dummy feds back to source2_dummy
   /// ========================================================
-  /// lstm        -------- addition - split ---- fc_out
-  /// source2_dummy   --/                  \-----output_dummy
-  /// source3_dummy    /
+  /// lstm        -------- addition - split ---- fc_out (to_lstm)
+  /// source2_dummy   --/                  \----- (to addition 3)
   std::vector<LayerRepresentation> before = {
     {"lstm", {"name=lstm", "input_layers=source"}},
-    {"concat", {"name=source2_dummy", "input_layers=source2"}},
-    {"concat", {"name=source3_dummy", "input_layers=source3"}},
-    {"addition", {"name=add", "input_layers=lstm,source2_dummy,source3_dummy"}},
+    {"addition", {"name=add", "input_layers=lstm,source2,source3"}},
     {"split", {"name=split", "input_layers=add"}},
-    {"concat", {"name=output_dummy", "input_layers=split(1)"}},
     {"fully_connected", {"name=fc_out", "input_layers=split(0)"}},
   };
 
@@ -229,31 +159,18 @@ TEST(RecurrentRealizer, recurrent_multi_inout_return_seq_p) {
     /// timestep 0
     {"lstm",
      {"name=lstm/0", "input_layers=source", "max_timestep=3", "timestep=0"}},
-    {"concat", {"name=source2_dummy/0", "input_layers=source2"}},
-    {"concat", {"name=source3_dummy/0", "input_layers=source3"}},
-    {"addition",
-     {"name=add/0", "input_layers=lstm/0,source2_dummy/0,source3_dummy/0"}},
+    {"addition", {"name=add/0", "input_layers=lstm/0,source2,source3"}},
     {"split", {"name=split/0", "input_layers=add/0"}},
-    {"concat", {"name=output_dummy/0", "input_layers=split/0(1)"}},
     {"fully_connected", {"name=fc_out/0", "input_layers=split/0(0)"}},
 
     /// timestep 1
     {"lstm",
      {"name=lstm/1", "input_layers=fc_out/0", "shared_from=lstm/0",
       "max_timestep=3", "timestep=1"}},
-    {"concat",
-     {"name=source2_dummy/1", "shared_from=source2_dummy/0",
-      "input_layers=source2"}},
-    {"concat",
-     {"name=source3_dummy/1", "shared_from=source3_dummy/0",
-      "input_layers=output_dummy/0"}},
     {"addition",
-     {"name=add/1", "input_layers=lstm/1,source2_dummy/1,source3_dummy/1",
+     {"name=add/1", "input_layers=lstm/1,source2,split/0(1)",
       "shared_from=add/0"}},
     {"split", {"name=split/1", "input_layers=add/1", "shared_from=split/0"}},
-    {"concat",
-     {"name=output_dummy/1", "input_layers=split/1(1)",
-      "shared_from=output_dummy/0"}},
     {"fully_connected",
      {"name=fc_out/1", "input_layers=split/1(0)", "shared_from=fc_out/0"}},
 
@@ -261,35 +178,27 @@ TEST(RecurrentRealizer, recurrent_multi_inout_return_seq_p) {
     {"lstm",
      {"name=lstm/2", "input_layers=fc_out/1", "shared_from=lstm/0",
       "max_timestep=3", "timestep=2"}},
-    {"concat",
-     {"name=source2_dummy/2", "shared_from=source2_dummy/0",
-      "input_layers=source2"}},
-    {"concat",
-     {"name=source3_dummy/2", "shared_from=source3_dummy/0",
-      "input_layers=output_dummy/1"}},
     {"addition",
-     {"name=add/2", "input_layers=lstm/2,source2_dummy/2,source3_dummy/2",
+     {"name=add/2", "input_layers=lstm/2,source2,split/1(1)",
       "shared_from=add/0"}},
     {"split", {"name=split/2", "input_layers=add/2", "shared_from=split/0"}},
-    {"concat",
-     {"name=output_dummy/2", "input_layers=split/2(1)",
-      "shared_from=output_dummy/0"}},
     {"fully_connected",
      {"name=fc_out/2", "input_layers=split/2(0)", "shared_from=fc_out/0"}},
 
-    /// return seq
-    {"concat", {"name=fc_out", "input_layers=fc_out/0,fc_out/1,fc_out/2"}},
+    /// mapping
+    {"concat",
+     {"name=fc_out/concat_0", "input_layers=fc_out/0,fc_out/1,fc_out/2"}},
+    {"identity", {"name=fc_out", "input_layers=fc_out/concat_0"}},
   };
 
   realizeAndEqual(r, before, expected);
 }
 
-TEST(RecurrentRealizer, recurrent_multi_inout_using_connection_return_seq_p) {
+TEST(RecurrentRealizer, recurrent_multi_inout_using_connection_p) {
   using C = Connection;
   RecurrentRealizer r(
     {
       "unroll_for=3",
-      "as_sequence=fc_out",
       "recurrent_input=lstm,add(2)",
       "recurrent_output=fc_out,split(1)",
     },
@@ -337,21 +246,32 @@ TEST(RecurrentRealizer, recurrent_multi_inout_using_connection_return_seq_p) {
     {"split", {"name=split/2", "input_layers=add/2", "shared_from=split/0"}},
     {"fully_connected",
      {"name=fc_out/2", "input_layers=split/2(0)", "shared_from=fc_out/0"}},
-    {"concat", {"name=fc_out", "input_layers=fc_out/0,fc_out/1,fc_out/2"}},
+
+    /// mapping
+    {"identity", {"name=fc_out", "input_layers=fc_out/2"}},
   };
 
   realizeAndEqual(r, before, expected);
 }
 
-TEST(RecurrentRealizer, recurrent_multi_inout_using_connection_p) {
+TEST(RecurrentRealizer, recurrent_multi_inout_multi_connection_end_p) {
   using C = Connection;
   RecurrentRealizer r(
     {
       "unroll_for=3",
       "recurrent_input=lstm,add(2)",
       "recurrent_output=fc_out,split(1)",
+      "as_sequence=split(1)",
     },
-    {C("source"), C("source2"), C("source3")}, {C("fc_out")});
+    {
+      C("source"),
+      C("source2"),
+      C("source3"),
+    },
+    {
+      C("split(0)"),
+      C("split(1)"),
+    });
 
   /// @note for below graph,
   /// 1. fc_out feds back to lstm
@@ -394,7 +314,12 @@ TEST(RecurrentRealizer, recurrent_multi_inout_using_connection_p) {
       "shared_from=add/0"}},
     {"split", {"name=split/2", "input_layers=add/2", "shared_from=split/0"}},
     {"fully_connected",
-     {"name=fc_out", "input_layers=split/2(0)", "shared_from=fc_out/0"}},
+     {"name=fc_out/2", "input_layers=split/2(0)", "shared_from=fc_out/0"}},
+
+    /// mapping
+    {"concat",
+     {"name=split/concat_1", "input_layers=split/0(1),split/1(1),split/2(1)"}},
+    {"identity", {"name=split", "input_layers=split/2(0),split/concat_1"}},
   };
 
   realizeAndEqual(r, before, expected);
index b0ff568..cbefa0b 100644 (file)
@@ -1088,8 +1088,9 @@ TEST(nntrainerModels, loadFromLayersRecurrent_p) {
                             });
 
   std::vector<std::string> expected_node_names = {
-    "recurrent/fc1/0", "recurrent/fc2/0", "recurrent/fc1/1", "recurrent/fc2/1",
-    "recurrent/fc1/2", "recurrent/fc2/2", "recurrent/fc2"};
+    "recurrent/fc1/0",        "recurrent/fc2/0", "recurrent/fc1/1",
+    "recurrent/fc2/1",        "recurrent/fc1/2", "recurrent/fc2/2",
+    "recurrent/fc2/concat_0", "recurrent/fc2"};
   std::vector<std::string> expected_input_layers = {
     "out_source" /**< input added with external_input */,
     "recurrent/fc1/0",
@@ -1098,10 +1099,13 @@ TEST(nntrainerModels, loadFromLayersRecurrent_p) {
     "recurrent/fc2/1",
     "recurrent/fc1/2",
     "recurrent/fc2/0" /**< out source's first input */,
+    "recurrent/fc2/concat_0", /**< identity's input */
   };
 
   auto graph = nn.getFlatGraph();
   for (unsigned int i = 0; i < graph.size(); ++i) {
+    /// comment below intended
+    // std::cout << *graph.at(i);
     EXPECT_EQ(graph.at(i)->getName(), expected_node_names.at(i)) << "at " << i;
     EXPECT_EQ(graph.at(i)->getInputConnectionName(0),
               expected_input_layers.at(i))