[ Unit Test ] Fix LSTM / RNN Unittest
author: jijoong.moon <jijoong.moon@samsung.com>
Wed, 16 Jun 2021 12:08:38 +0000 (21:08 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Thu, 17 Jun 2021 03:40:20 +0000 (12:40 +0900)
This commit includes,
  . fix wrong unittest cases of lstm & rnn

**Self evaluation:**
1. Build test:  [X]Passed [ ]Failed [ ]Skipped
2. Run test:  [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: jijoong.moon <jijoong.moon@samsung.com>
packaging/unittest_models.tar.gz
test/input_gen/genModelTests.py
test/input_gen/recorder.py
test/unittest/unittest_nntrainer_models.cpp

index 1057331..7f9f0ec 100644 (file)
Binary files a/packaging/unittest_models.tar.gz and b/packaging/unittest_models.tar.gz differ
index 172b6e8..2ecdc1e 100644 (file)
@@ -364,26 +364,28 @@ if __name__ == "__main__":
     lstm_layer_tc(1, 2, True)(file_name="lstm_return_sequence.info")
     lstm_layer_tc(2, 2, True)(file_name="lstm_return_sequence_with_batch.info")
 
-    record(
-        file_name="multi_lstm_return_sequence.info",
+    multi_lstm_layer_tc = lambda batch, time: partial(
+        record,
         model=[
-            K.Input(batch_shape=(1, 2, 1)),
+            K.Input(batch_shape=(batch, time, 1)),
             K.layers.LSTM(
-                2,
+                time,
                 recurrent_activation="sigmoid",
                 activation="tanh",
                 return_sequences=True,
             ),
-            K.layers.LSTM(2, recurrent_activation="sigmoid", activation="tanh"),
+            K.layers.LSTM(time, recurrent_activation="sigmoid", activation="tanh"),
             K.layers.Dense(1),
         ],
         optimizer=opt.SGD(learning_rate=0.1),
         iteration=10,
-        input_shape=(1, 2, 1),
-        label_shape=(1, 1, 1),
+        input_shape=(batch, time, 1),
+        label_shape=(batch, 1),
         is_onehot=False,
         loss_fn_str="mse",
     )
+    multi_lstm_layer_tc(1,2)(file_name="multi_lstm_return_sequence.info")
+    multi_lstm_layer_tc(2,2)(file_name="multi_lstm_return_sequence_with_batch.info")
 
     rnn_layer_tc = lambda batch, time, return_sequences: partial(
         record,
@@ -403,18 +405,24 @@ if __name__ == "__main__":
     rnn_layer_tc(1, 2, True)(file_name="rnn_return_sequences.info")
     rnn_layer_tc(2, 2, True)(file_name="rnn_return_sequence_with_batch.info")
 
-    record(
-        file_name="multi_rnn_return_sequence.info",
+    multi_rnn_layer_tc = lambda batch, time: partial(
+        record,
         model=[
-            K.Input(batch_shape=(1, 2, 1)),
-            K.layers.SimpleRNN(2, return_sequences=True),
-            K.layers.SimpleRNN(2),
+            K.Input(batch_shape=(batch, time, 1)),
+            K.layers.SimpleRNN(
+                time,
+                return_sequences=True,
+            ),
+            K.layers.SimpleRNN(time),
             K.layers.Dense(1),
         ],
         optimizer=opt.SGD(learning_rate=0.1),
         iteration=10,
-        input_shape=(1, 2, 1),
-        label_shape=(1, 1, 1),
+        input_shape=(batch, time, 1),
+        label_shape=(batch, 1),
         is_onehot=False,
         loss_fn_str="mse",
     )
+    multi_rnn_layer_tc(1,2)(file_name="multi_rnn_return_sequence.info")
+    multi_rnn_layer_tc(2,2)(file_name="multi_rnn_return_sequence_with_batch.info")
+    
index e5022b0..720122d 100644 (file)
@@ -115,7 +115,7 @@ def _debug_print(
 # @brief generate data using uniform data from a function and save to the file.
 # @note one-hot label is supported for now, this could be extended if needed.
 def prepare_data(model, input_shape, label_shape, writer_fn, is_onehot, **kwargs):
-    initial_input = _rand_like(input_shape)
+    initial_input = _rand_like(input_shape) / 10
     if is_onehot:
         label = tf.one_hot(
             indices=np.random.randint(0, label_shape[1] - 1, label_shape[0]),
index 7a38afe..7397e00 100644 (file)
@@ -1153,20 +1153,22 @@ INI multi_lstm_return_sequence(
   }
 );
 
-INI rnn_return_sequence_with_batch(
-  "rnn_return_sequence_with_batch",
+INI multi_lstm_return_sequence_with_batch(
+  "multi_lstm_return_sequence_with_batch",
   {
     nn_base + "loss=mse | batch_size=2",
     sgd_base + "learning_rate = 0.1",
     I("input") + input_base + "input_shape=1:2:1",
-    I("rnn") + rnn_base +
+    I("lstm") + lstm_base +
       "unit = 2" + "input_layers=input"+ "return_sequences=true",
-    I("outputlayer") + fc_base + "unit = 1" + "input_layers=rnn"
+    I("lstm2") + lstm_base +
+      "unit = 2" + "input_layers=lstm",
+    I("outputlayer") + fc_base + "unit = 1" + "input_layers=lstm2"
   }
 );
 
-INI rnn_return_sequence_with_batch_n(
-  "rnn_return_sequence_with_batch_n",
+INI rnn_return_sequence_with_batch(
+  "rnn_return_sequence_with_batch",
   {
     nn_base + "loss=mse | batch_size=2",
     sgd_base + "learning_rate = 0.1",
@@ -1191,6 +1193,20 @@ INI multi_rnn_return_sequence(
   }
 );
 
+INI multi_rnn_return_sequence_with_batch(
+  "multi_rnn_return_sequence_with_batch",
+  {
+    nn_base + "loss=mse | batch_size=2",
+    sgd_base + "learning_rate = 0.1",
+    I("input") + input_base + "input_shape=1:2:1",
+    I("rnn") + rnn_base +
+      "unit = 2" + "input_layers=input"+ "return_sequences=true",
+    I("rnn2") + rnn_base +
+      "unit = 2" + "input_layers=rnn",
+    I("outputlayer") + fc_base + "unit = 1" + "input_layers=rnn2"
+  }
+);
+
 INSTANTIATE_TEST_CASE_P(
   nntrainerModelAutoTests, nntrainerModelTest, ::testing::Values(
     mkModelTc(fc_sigmoid_mse, "3:1:1:10", 10),
@@ -1238,10 +1254,12 @@ INSTANTIATE_TEST_CASE_P(
     mkModelTc(lstm_return_sequence, "1:1:2:1", 10),
     mkModelTc(lstm_return_sequence_with_batch, "2:1:2:1", 10),
     mkModelTc(multi_lstm_return_sequence, "1:1:1:1", 10),
+    mkModelTc(multi_lstm_return_sequence_with_batch, "2:1:1:1", 10),
     mkModelTc(rnn_basic, "1:1:1:1", 10),
     mkModelTc(rnn_return_sequences, "1:1:2:1", 10),
     mkModelTc(rnn_return_sequence_with_batch, "2:1:2:1", 10),
-    mkModelTc(multi_rnn_return_sequence, "1:1:1:1", 10)
+    mkModelTc(multi_rnn_return_sequence, "1:1:1:1", 10),
+    mkModelTc(multi_rnn_return_sequence_with_batch, "2:1:1:1", 10)
 ), [](const testing::TestParamInfo<nntrainerModelTest::ParamType>& info){
  return std::get<0>(info.param).getName();
 });