[ LSTM ] Add Multi-Layered LSTM Unittest
author     jijoong.moon <jijoong.moon@samsung.com>
           Thu, 27 May 2021 11:20:32 +0000 (20:20 +0900)
committer  Jijoong Moon <jijoong.moon@samsung.com>
           Wed, 9 Jun 2021 08:11:13 +0000 (17:11 +0900)
This commit includes,
  . Multi-Layered LSTM unit test.

**Self evaluation:**
1. Build test:  [X]Passed [ ]Failed [ ]Skipped
2. Run test:  [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
packaging/unittest_models.tar.gz
test/input_gen/genModelTests.py
test/unittest/unittest_nntrainer_models.cpp

diff --git a/packaging/unittest_models.tar.gz b/packaging/unittest_models.tar.gz
index 51f41d7..b655f46 100644
Binary files a/packaging/unittest_models.tar.gz and b/packaging/unittest_models.tar.gz differ
diff --git a/test/input_gen/genModelTests.py b/test/input_gen/genModelTests.py
index 105a159..343c788 100644
@@ -393,5 +393,38 @@ if __name__ == "__main__":
         loss_fn_str="mse"
     )
     lstm = K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh', return_sequences=True)
-    lstm_layer_return_sequence_with_batch_n(lstm)(file_name="lstm_return_sequence_with_batch_n.info", debug=["summary", "initial_weights", "dx", "output", "layer_name", "label","weights","gradients"],)    
+    lstm_layer_return_sequence_with_batch_n(lstm)(file_name="lstm_return_sequence_with_batch_n.info", debug=["summary", "initial_weights", "dx", "output", "layer_name", "label","weights","gradients"],)
 
+    multi_lstm_layer_return_sequence = partial(
+        record,
+        model=[
+            K.Input(batch_shape=(1, 2, 1)),
+            K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh', return_sequences=True),
+            K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh'),
+            K.layers.Dense(1)
+        ],
+        optimizer=opt.SGD(learning_rate=0.1),
+        iteration=1,
+        input_shape=(1,2,1),
+        label_shape=(1,1,1),
+        is_onehot=False,
+        loss_fn_str="mse"
+    )
+    multi_lstm_layer_return_sequence(file_name="multi_lstm_return_sequence.info", debug=["summary", "initial_weights", "dx", "output", "layer_name", "label","weights","gradients"],)
+
+    multi_lstm_layer_return_sequence_with_batch_n = partial(
+        record,
+        model=[
+            K.Input(batch_shape=(2, 2, 1)),
+            K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh', return_sequences=True),
+            K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh'),
+            K.layers.Dense(1)
+        ],
+        optimizer=opt.SGD(learning_rate=0.1),
+        iteration=2,
+        input_shape=(2,2,1),
+        label_shape=(2,1),
+        is_onehot=False,
+        loss_fn_str="mse"
+    )
+    multi_lstm_layer_return_sequence_with_batch_n(file_name="multi_lstm_return_sequence_with_batch_n.info", debug=["summary", "initial_weights", "dx", "output", "layer_name", "label","weights","gradients"],)
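
Note on the stacked configuration above: the first LSTM sets return_sequences=True so the second LSTM receives the full (batch, time, features) sequence, while the second LSTM emits only its last hidden state, which Dense(1) maps to a single value per sample. A minimal shape-trace sketch, not part of the generator script, assuming Keras is imported as K as in genModelTests.py:

    from tensorflow import keras as K

    inp = K.Input(batch_shape=(1, 2, 1))       # (batch=1, time=2, feature=1)
    # First LSTM returns the whole sequence: (1, 2, 2)
    seq = K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh',
                        return_sequences=True)(inp)
    # Second LSTM returns only its last hidden state: (1, 2)
    last = K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh')(seq)
    # Dense maps each sample to one value: (1, 1)
    out = K.layers.Dense(1)(last)
    print(seq.shape, last.shape, out.shape)    # (1, 2, 2) (1, 2) (1, 1)
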
diff --git a/test/unittest/unittest_nntrainer_models.cpp b/test/unittest/unittest_nntrainer_models.cpp
index 63400a4..5f22f4b 100644
@@ -1116,6 +1116,36 @@ INI lstm_return_sequence_with_batch_n(
   }
 );
 
+INI multi_lstm_return_sequence(
+  "multi_lstm_return_sequence",
+  {
+    nn_base + "loss=mse | batch_size=1",
+    sgd_base + "learning_rate = 0.1",
+    I("input") + input_base + "input_shape=1:2:1",
+    I("lstm") + lstm_base +
+      "unit = 2" + "input_layers=input"+ "return_sequences=true",
+    I("lstm2") + lstm_base +
+      "unit = 2" + "input_layers=lstm",
+    I("outputlayer") + fc_base + "unit = 1" + "input_layers=lstm2"
+  }
+);
+
+
+INI multi_lstm_return_sequence_with_batch_n(
+  "multi_lstm_return_sequence_with_batch_n",
+  {
+    nn_base + "loss=mse | batch_size=2",
+    sgd_base + "learning_rate = 0.1",
+    I("input") + input_base + "input_shape=1:2:1",
+    I("lstm") + lstm_base +
+      "unit = 2" + "input_layers=input"+ "return_sequences=true",
+    I("lstm2") + lstm_base +
+      "unit = 2" + "input_layers=lstm",
+    I("outputlayer") + fc_base + "unit = 1" + "input_layers=lstm2"
+  }
+);
+
+
 
 INSTANTIATE_TEST_CASE_P(
   nntrainerModelAutoTests, nntrainerModelTest, ::testing::Values(
@@ -1163,7 +1193,9 @@ INSTANTIATE_TEST_CASE_P(
     mkModelTc(lstm_basic, "1:1:1:1", 1),
     mkModelTc(lstm_return_sequence, "1:1:2:1", 1),
     mkModelTc(lstm_return_sequence_with_batch, "2:1:2:1", 1),
-    mkModelTc(lstm_return_sequence_with_batch_n, "2:1:2:1", 2)
+    mkModelTc(lstm_return_sequence_with_batch_n, "2:1:2:1", 2),
+    mkModelTc(multi_lstm_return_sequence, "1:1:1:1", 1),
+    mkModelTc(multi_lstm_return_sequence_with_batch_n, "2:1:1:1", 2)
/// #if gtest_version <= 1.7.0
 ));
 /// #else gtest_version > 1.8.0
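
The new mkModelTc entries follow the existing pattern in this list: the dimension string appears to give the expected label shape in batch:channel:height:width order, and the last argument is the iteration count, matching iteration= in genModelTests.py. Since both stacked models end with a Dense(1) layer, the label is 1:1:1:1 for batch size 1 and 2:1:1:1 for batch size 2. A quick sketch checking the batch-2 output shape, again assuming Keras is imported as K and not part of the test sources:

    from tensorflow import keras as K

    model = K.Sequential([
        K.Input(batch_shape=(2, 2, 1)),
        K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh',
                      return_sequences=True),
        K.layers.LSTM(2, recurrent_activation='sigmoid', activation='tanh'),
        K.layers.Dense(1),
    ])
    print(model.output_shape)   # (2, 1): two samples, one value each, i.e. 2:1:1:1
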