[rnncell] generate testcase for multi in/out of rnncell
author hyeonseok lee <hs89.lee@samsung.com>
Thu, 28 Apr 2022 07:48:01 +0000 (16:48 +0900)
committer Jijoong Moon <jijoong.moon@samsung.com>
Wed, 11 May 2022 04:52:11 +0000 (13:52 +0900)
 - Generate testcases for rnncell with multiple inputs/outputs and replace the existing 1-input/1-output rnncell testcases

Signed-off-by: hyeonseok lee <hs89.lee@samsung.com>
packaging/unittest_layers_v2.tar.gz
packaging/unittest_models_v2.tar.gz
test/input_gen/genLayerTests.py
test/input_gen/genModelsRecurrent_v2.py
test/input_gen/transLayer.py
test/unittest/layers/unittest_layers_rnncell.cpp
test/unittest/models/models_test_utils.cpp
test/unittest/models/unittest_models_recurrent.cpp

index 73f8c89..aa64225 100644
Binary files a/packaging/unittest_layers_v2.tar.gz and b/packaging/unittest_layers_v2.tar.gz differ
index ae70951..42d3dbe 100644
Binary files a/packaging/unittest_models_v2.tar.gz and b/packaging/unittest_models_v2.tar.gz differ
index d906ce0..5401bb0 100644
@@ -96,6 +96,11 @@ if __name__ == "__main__":
                          return_state=False)
     record_single(rnn, (3, 1, 7), "rnn_single_step")
 
+    unit, batch_size, unroll_for, feature_size = [1, 1, 1, 1]
+    rnncell = K.layers.SimpleRNNCell(units=unit,
+                         bias_initializer='glorot_uniform')
+    record_single(rnncell, [(batch_size, feature_size)] + [(batch_size, unit)], "rnncell_single_step", input_type='float')
+
     lstm = K.layers.LSTM(units=5,
                          recurrent_activation="sigmoid",
                          activation="tanh",
index f704d8c..e7fc61b 100644
@@ -30,24 +30,24 @@ class FCUnroll(torch.nn.Module):
         return output, loss
 
 class RNNCellStacked(torch.nn.Module):
-    def __init__(self, unroll_for=1, num_rnn=1, input_size=1, hidden_size=1):
+    def __init__(self, unroll_for=2, num_rnncell=1, input_size=2, hidden_size=2):
         super().__init__()
-        self.rnns = torch.nn.ModuleList(
+        self.rnncells = torch.nn.ModuleList(
             [
                 torch.nn.RNNCell(input_size, hidden_size)
-                for _ in range(num_rnn)
+                for _ in range(num_rnncell)
             ]
         )
         self.unroll_for = unroll_for
         self.loss = torch.nn.MSELoss()
 
     def forward(self, inputs, labels):
-        hs = [torch.zeros_like(inputs[0]) for _ in self.rnns]
         out = inputs[0]
+        hs = inputs[1:]
         ret = []
         for _ in range(self.unroll_for):
-            for i, rnn in enumerate(self.rnns):
-                hs[i] = rnn(out, hs[i])
+            for i, rnncell in enumerate(self.rnncells):
+                hs[i] = rnncell(out, hs[i])
                 out = hs[i]
             ret.append(out)
 
@@ -198,19 +198,22 @@ if __name__ == "__main__":
         clip=True
     )
 
+
+    unroll_for, num_rnncell, batch_size, unit, feature_size, iteration = [2, 1, 3, 2, 2, 2]
     record_v2(
-        RNNCellStacked(unroll_for=2, num_rnn=1, input_size=2, hidden_size=2),
-        iteration=2,
-        input_dims=[(3, 2)],
-        label_dims=[(3, 2, 2)],
+        RNNCellStacked(unroll_for=unroll_for, num_rnncell=num_rnncell, input_size=feature_size, hidden_size=unit),
+        iteration=iteration,
+        input_dims=[(batch_size, feature_size)] + [(batch_size, unit) for _ in range(num_rnncell)],
+        label_dims=[(batch_size, unroll_for, unit)],
         name="rnncell_single",
     )
 
+    unroll_for, num_rnncell, batch_size, unit, feature_size, iteration = [2, 2, 3, 2, 2, 2]
     record_v2(
-        RNNCellStacked(unroll_for=2, num_rnn=2, input_size=2, hidden_size=2),
-        iteration=2,
-        input_dims=[(3, 2)],
-        label_dims=[(3, 2, 2)],
+        RNNCellStacked(unroll_for=unroll_for, num_rnncell=num_rnncell, input_size=feature_size, hidden_size=unit),
+        iteration=iteration,
+        input_dims=[(batch_size, feature_size)] + [(batch_size, unit) for _ in range(num_rnncell)],
+        label_dims=[(batch_size, unroll_for, unit)],
         name="rnncell_stacked",
     )
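
The reworked forward pass above can be exercised standalone. A minimal sketch, with shapes taken from the rnncell_stacked case and assuming forward stacks the per-step outputs and applies the MSE loss against labels[0], as elsewhere in this file:

    import torch

    unroll_for, num_rnncell, batch_size, unit, feature_size = 2, 2, 3, 2, 2
    model = RNNCellStacked(unroll_for=unroll_for, num_rnncell=num_rnncell,
                           input_size=feature_size, hidden_size=unit)

    x = torch.rand(batch_size, feature_size)    # data input
    hs = [torch.rand(batch_size, unit)          # one initial hidden state
          for _ in range(num_rnncell)]          # per rnncell
    labels = torch.rand(batch_size, unroll_for, unit)

    # inputs[0] is the data; inputs[1:] are the per-cell hidden states.
    output, loss = model([x] + hs, [labels])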
 
index ac45a8c..9af1b42 100644
@@ -214,8 +214,8 @@ class MultiOutLayer(IdentityTransLayer):
         return [layer(tf_output) for layer in self.stub_layers]
 
 ##
-# @brief Translayer for lstmcell layer
-class LSTMCellTransLayer(IdentityTransLayer):
+# @brief Translayer for rnncell/lstmcell layer
+class RNNCELL_LSTMCellTransLayer(IdentityTransLayer):
     def build(self, input_shape):
         if not self.built:
             self.tf_layer.build(input_shape[0])
@@ -283,8 +283,8 @@ def attach_trans_layer(layer):
     if isinstance(layer, CHANNEL_LAST_LAYERS):
         return ChannelLastTransLayer(layer)
 
-    if isinstance(layer, K.layers.LSTMCell):
-        return LSTMCellTransLayer(layer)
+    if isinstance(layer, (K.layers.SimpleRNNCell, K.layers.LSTMCell)):
+        return RNNCELL_LSTMCellTransLayer(layer)
 
     if isinstance(layer, K.layers.GRU):
         return GRUTransLayer(layer)
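
For context, the renamed trans layer works because its call path (outside this hunk) treats the first tensor as the cell input and the remaining tensors as the recurrent states. A hypothetical sketch of that convention:

    # Hypothetical; the actual call() body lives outside this hunk.
    def call(self, inputs):
        # inputs[0]: cell input, inputs[1:]: previous hidden state(s)
        output, states = self.tf_layer(inputs[0], inputs[1:])
        return [output, *states]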
index 765e8eb..86cb534 100644
 #include <layers_common_tests.h>
 #include <rnncell.h>
 
-// auto semantic_rnncell = LayerSemanticsParamType(
-//   nntrainer::createLayer<nntrainer::RNNCellLayer>,
-//   nntrainer::RNNCellLayer::type, {"unit=1", "timestep=0", "max_timestep=1"},
-//   0, false, 1);
+auto semantic_rnncell = LayerSemanticsParamType(
+  nntrainer::createLayer<nntrainer::RNNCellLayer>,
+  nntrainer::RNNCellLayer::type, {"unit=1"}, 0, false, 2);
 
-// INSTANTIATE_TEST_CASE_P(RNNCell, LayerSemantics,
-//                         ::testing::Values(semantic_rnncell));
+INSTANTIATE_TEST_CASE_P(RNNCell, LayerSemantics,
+                        ::testing::Values(semantic_rnncell));
 
-// auto rnncell_single_step = LayerGoldenTestParamType(
-//   nntrainer::createLayer<nntrainer::RNNCellLayer>,
-//   {"unit=5", "integrate_bias=true", "timestep=0", "max_timestep=1"},
-//   "3:1:1:7", "rnn_single_step.nnlayergolden",
-//   LayerGoldenTestParamOptions::DEFAULT);
+auto rnncell_single_step = LayerGoldenTestParamType(
+  nntrainer::createLayer<nntrainer::RNNCellLayer>,
+  {"unit=5", "integrate_bias=true"}, "3:1:1:7,3:1:1:5",
+  "rnncell_single_step.nnlayergolden", LayerGoldenTestParamOptions::DEFAULT);
 
-// INSTANTIATE_TEST_CASE_P(RNNCell, LayerGoldenTest,
-//                         ::testing::Values(rnncell_single_step));
+INSTANTIATE_TEST_CASE_P(RNNCell, LayerGoldenTest,
+                        ::testing::Values(rnncell_single_step));
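
For readability, the comma-separated input spec above lists one shape per input, in nntrainer's batch:channel:height:width dim-string form:

    "3:1:1:7,3:1:1:5"
      3:1:1:7 -> data input:   batch=3, channel=1, height=1, width(feature)=7
      3:1:1:5 -> hidden state: batch=3, channel=1, height=1, width(unit)=5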
index 371d118..7534676 100644
@@ -325,7 +325,7 @@ void GraphWatcher::initialize() {
   }
 
   if (nn->compile()) {
-    throw std::invalid_argument("initiation failed");
+    throw std::invalid_argument("compilation failed");
   };
 
   if (nn->initialize()) {
index 9ee9577..3ca4bc9 100644
@@ -137,6 +137,88 @@ std::unique_ptr<NeuralNetwork> makeFCClipped() {
   return nn;
 }
 
+static std::unique_ptr<NeuralNetwork> makeSingleRNNCell() {
+  std::unique_ptr<NeuralNetwork> nn(new NeuralNetwork());
+  nn->setProperty({"batch_size=3"});
+
+  auto outer_graph = makeGraph({
+    {"input", {"name=input", "input_shape=1:1:2"}},
+    {"input", {"name=input_hidden_state", "input_shape=1:1:2"}},
+    /// here rnncell is being inserted
+    {"mse", {"name=loss", "input_layers=rnncell_scope/a1"}},
+  });
+  for (auto &node : outer_graph) {
+    nn->addLayer(node);
+  }
+
+  auto rnncell = makeGraph({
+    {"input", {"name=dummy_0", "input_shape=1"}},
+    {"input", {"name=dummy_1", "input_shape=1"}},
+    {"rnncell",
+     {"name=a1", "unit=2", "integrate_bias=false",
+      "input_layers=dummy_0, dummy_1"}},
+  });
+
+  nn->addWithReferenceLayers(
+    rnncell, "rnncell_scope", {"input", "input_hidden_state"},
+    {"a1(0)", "a1(1)"}, {"a1"}, ml::train::ReferenceLayersType::RECURRENT,
+    {
+      "unroll_for=2",
+      "as_sequence=a1",
+      "recurrent_input=a1(0), a1(1)",
+      "recurrent_output=a1(0), a1(0)",
+    });
+
+  nn->setProperty({"input_layers=input, input_hidden_state"});
+  nn->setOptimizer(ml::train::createOptimizer("sgd", {"learning_rate = 0.1"}));
+  return nn;
+}
+
+static std::unique_ptr<NeuralNetwork> makeStackedRNNCell() {
+  std::unique_ptr<NeuralNetwork> nn(new NeuralNetwork());
+  nn->setProperty({"batch_size=3"});
+
+  auto outer_graph = makeGraph({
+    {"input", {"name=input", "input_shape=1:1:2"}},
+    {"input", {"name=a1_input_hidden_state", "input_shape=1:1:2"}},
+    {"input", {"name=a2_input_hidden_state", "input_shape=1:1:2"}},
+    /// here rnncells are being inserted
+    {"mse", {"name=loss", "input_layers=rnncell_scope/a2(0)"}},
+  });
+  for (auto &node : outer_graph) {
+    nn->addLayer(node);
+  }
+
+  auto rnncell = makeGraph({
+    {"input", {"name=dummy_0", "input_shape=1"}},
+    {"input", {"name=dummy_1", "input_shape=1"}},
+    {"input", {"name=dummy_2", "input_shape=1"}},
+    {"rnncell",
+     {"name=a1", "unit=2", "integrate_bias=false",
+      "input_layers=dummy_0, dummy_1"}},
+    {"rnncell",
+     {"name=a2", "unit=2", "integrate_bias=false",
+      "input_layers=a1(0), dummy_2"}},
+  });
+
+  nn->addWithReferenceLayers(
+    rnncell, "rnncell_scope",
+    {"input", "a1_input_hidden_state", "a2_input_hidden_state"},
+    {"a1(0)", "a1(1)", "a2(1)"}, {"a2"},
+    ml::train::ReferenceLayersType::RECURRENT,
+    {
+      "unroll_for=2",
+      "as_sequence=a2",
+      "recurrent_input=a1(0), a1(1), a2(1)",
+      "recurrent_output=a2(0), a1(0), a2(0)",
+    });
+
+  nn->setProperty(
+    {"input_layers=input, a1_input_hidden_state, a2_input_hidden_state"});
+  nn->setOptimizer(ml::train::createOptimizer("sgd", {"learning_rate = 0.1"}));
+  return nn;
+}
+
 static std::unique_ptr<NeuralNetwork> makeSingleLSTM() {
   auto nn = std::make_unique<NeuralNetwork>();
   nn->setProperty({"batch_size=3"});
@@ -409,70 +491,6 @@ static std::unique_ptr<NeuralNetwork> makeStackedZoneoutLSTMCell() {
   return nn;
 }
 
-// static std::unique_ptr<NeuralNetwork> makeSingleRNNCell() {
-//   auto nn = std::make_unique<NeuralNetwork>();
-//   nn->setProperty({"batch_size=3"});
-
-//   auto outer_graph = makeGraph({
-//     {"input", {"name=input", "input_shape=1:1:2"}},
-//     /// here rnncell is being inserted
-//     {"mse", {"name=loss", "input_layers=rnncell_scope/a1"}},
-//   });
-//   for (auto &node : outer_graph) {
-//     nn->addLayer(node);
-//   }
-
-//   auto rnncell = makeGraph({
-//     {"rnncell", {"name=a1", "unit=2", "integrate_bias=false"}},
-//   });
-
-//   nn->addWithReferenceLayers(rnncell, "rnncell_scope", {"input"}, {"a1"},
-//                              {"a1"},
-//                              ml::train::ReferenceLayersType::RECURRENT,
-//                              {
-//                                "unroll_for=2",
-//                                "as_sequence=a1",
-//                                "recurrent_input=a1",
-//                                "recurrent_output=a1",
-//                              });
-
-//   nn->setOptimizer(ml::train::createOptimizer("sgd", {"learning_rate =
-//   0.1"})); return nn;
-// }
-
-// static std::unique_ptr<NeuralNetwork> makeStackedRNNCell() {
-//   auto nn = std::make_unique<NeuralNetwork>();
-//   nn->setProperty({"batch_size=3"});
-
-//   auto outer_graph = makeGraph({
-//     {"input", {"name=input", "input_shape=1:1:2"}},
-//     /// here rnncells are being inserted
-//     {"mse", {"name=loss", "input_layers=rnncell_scope/a2"}},
-//   });
-//   for (auto &node : outer_graph) {
-//     nn->addLayer(node);
-//   }
-
-//   auto rnncell = makeGraph({
-//     {"rnncell", {"name=a1", "unit=2", "integrate_bias=false"}},
-//     {"rnncell",
-//      {"name=a2", "unit=2", "integrate_bias=false", "input_layers=a1"}},
-//   });
-
-//   nn->addWithReferenceLayers(rnncell, "rnncell_scope", {"input"}, {"a1"},
-//                              {"a2"},
-//                              ml::train::ReferenceLayersType::RECURRENT,
-//                              {
-//                                "unroll_for=2",
-//                                "as_sequence=a2",
-//                                "recurrent_input=a1",
-//                                "recurrent_output=a2",
-//                              });
-
-//   nn->setOptimizer(ml::train::createOptimizer("sgd", {"learning_rate =
-//   0.1"})); return nn;
-// }
-
 static std::unique_ptr<NeuralNetwork> makeSingleGRUCell() {
   auto nn = std::make_unique<NeuralNetwork>();
   nn->setProperty({"batch_size=3"});
@@ -567,6 +585,9 @@ INSTANTIATE_TEST_CASE_P(
     mkModelTc_V2(makeFC, "fc_unroll_stacked", ModelTestOption::COMPARE_V2),
     mkModelTc_V2(makeFCClipped, "fc_unroll_stacked_clipped",
                  ModelTestOption::COMPARE_V2),
+    mkModelTc_V2(makeSingleRNNCell, "rnncell_single", ModelTestOption::ALL_V2),
+    mkModelTc_V2(makeStackedRNNCell, "rnncell_stacked",
+                 ModelTestOption::ALL_V2),
     mkModelTc_V2(makeSingleLSTM, "lstm_single", ModelTestOption::ALL_V2),
     mkModelTc_V2(makeStackedLSTM, "lstm_stacked", ModelTestOption::ALL_V2),
     mkModelTc_V2(makeSingleBidirectionalLSTM, "bidirectional_lstm_single",
@@ -613,10 +634,6 @@ INSTANTIATE_TEST_CASE_P(
                  ModelTestOption::ALL_V2),
     mkModelTc_V2(makeStackedZoneoutLSTMCell, "zoneout_lstm_stacked_100_100",
                  ModelTestOption::ALL_V2),
-    // mkModelTc_V2(makeSingleRNNCell, "rnncell_single__1",
-    //              ModelTestOption::ALL_V2),
-    // mkModelTc_V2(makeStackedRNNCell, "rnncell_stacked__1",
-    //              ModelTestOption::ALL_V2),
     mkModelTc_V2(makeSingleGRUCell, "grucell_single", ModelTestOption::ALL_V2),
     mkModelTc_V2(makeStackedGRUCell, "grucell_stacked",
                  ModelTestOption::ALL_V2),