[Pooling2D/Test] Add model tests
authorJihoon Lee <jhoon.it.lee@samsung.com>
Mon, 29 Mar 2021 12:49:44 +0000 (21:49 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Tue, 30 Mar 2021 07:24:29 +0000 (16:24 +0900)
Add model test for various types of pooling2d

**Self evaluation:**
1. Build test: [X]Passed [ ]Failed [ ]Skipped
2. Run test: [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Jihoon Lee <jhoon.it.lee@samsung.com>
packaging/unittest_models.tar.gz
test/input_gen/genModelTests.py
test/input_gen/transLayer.py
test/unittest/unittest_nntrainer_models.cpp

index c5a3aaa..1e99594 100644 (file)
Binary files a/packaging/unittest_models.tar.gz and b/packaging/unittest_models.tar.gz differ
index fc3d528..665c60c 100644 (file)
@@ -193,3 +193,70 @@ if __name__ == "__main__":
     )
 
     conv_layer_tc(strides=(3, 3))(file_name="conv_uneven_strides.info", debug="summary")
+
+    pool_layer_tc = lambda pool_layer: partial(
+        record,
+        model=[
+            K.Input(shape=(2, 5, 3)),
+            pool_layer,
+            K.layers.Activation("sigmoid"),
+            K.layers.Flatten(),
+            K.layers.Dense(10),
+            K.layers.Activation("softmax"),
+        ],
+        optimizer=opt.SGD(learning_rate=0.1),
+        iteration=10,
+        input_shape=(3, 2, 5, 3),
+        label_shape=(3, 10),
+        loss_fn_str="cross_softmax",
+    )
+
+    pool_layer_tc(K.layers.MaxPooling2D(pool_size=3, strides=1, padding="same"))(
+        file_name="pooling_max_same_padding.info",  # debug="output"
+    )  # padding: 1, 1
+
+    pool_layer_tc(K.layers.MaxPooling2D(pool_size=3, strides=1, padding="valid"))(
+        file_name="pooling_max_valid_padding.info",  # debug="output"
+    )  # padding: 0, 0 (valid = no padding)
+
+    pool_layer_tc(K.layers.AveragePooling2D(pool_size=3, strides=1, padding="same"))(
+        file_name="pooling_avg_same_padding.info",  # debug="dx"
+    )  # padding: 1, 1
+
+    pool_layer_tc(K.layers.AveragePooling2D(pool_size=3, strides=1, padding="valid"))(
+        file_name="pooling_avg_valid_padding.info",  # debug="dx"
+    )
+
+    pool_layer_tc(K.layers.GlobalAvgPool2D(data_format="channels_first"))(
+        file_name="pooling_global_avg.info",  # debug="summary"
+    )
+
+    pool_layer_tc(K.layers.GlobalMaxPool2D(data_format="channels_first"))(
+        file_name="pooling_global_max.info",  # debug="dx"
+    )
+
+    pool_layer_tc2 = lambda pool_layer: partial(
+        record,
+        model=[
+            K.Input(shape=(2, 3, 5)),
+            pool_layer,
+            K.layers.Activation("sigmoid"),
+            K.layers.Flatten(),
+            K.layers.Dense(10),
+            K.layers.Activation("softmax"),
+        ],
+        optimizer=opt.SGD(learning_rate=0.1),
+        iteration=10,
+        input_shape=(3, 2, 3, 5),
+        label_shape=(3, 10),
+        loss_fn_str="cross_softmax",
+    )
+
+    pool_layer_tc2(K.layers.MaxPooling2D(pool_size=3, strides=2, padding="same"))(
+        file_name="pooling_max_same_padding_multi_stride.info",  # debug="dx"
+    )
+
+    pool_layer_tc2(K.layers.AveragePooling2D(pool_size=3, strides=2, padding="same"))(
+        file_name="pooling_avg_same_padding_multi_stride.info",  # debug="output"
+    )
+
index c1a88f8..87c0384 100644 (file)
@@ -137,14 +137,23 @@ class ChannelLastTransLayer(AbstractTransLayer):
         return [self._nntr_kernel(t) for t in weights]
 
 
-CHANNEL_LAST_LAYERS = (K.layers.Conv2D, K.layers.AveragePooling2D)
+CHANNEL_LAST_LAYERS = (
+    K.layers.Conv2D,
+    K.layers.AveragePooling2D,
+    K.layers.AvgPool2D,
+    K.layers.MaxPooling2D,
+    K.layers.MaxPool2D,
+)
 
 
 ##
 # @brief A factory function to attach translayer to existing layer
 # if nothing should be attached, it does not attach the layer
 def attach_trans_layer(layer):
-    if isinstance(layer, (K.layers.BatchNormalization, K.layers.normalization_v2.BatchNormalization)):
+    if isinstance(
+        layer,
+        (K.layers.BatchNormalization, K.layers.normalization_v2.BatchNormalization),
+    ):
         return BatchNormTransLayer(layer)
 
     if isinstance(layer, CHANNEL_LAST_LAYERS):
index 6e6cac3..e88a590 100644 (file)
@@ -766,6 +766,118 @@ INI conv_none_loss_validate(
   }
 );
 
+INI pooling_max_same_padding(
+  "pooling_max_same_padding",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:5:3",
+    I("pooling_1") + pooling_base +
+            "pooling=max | pool_size = 3,3 | padding =1,1" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_max_same_padding_multi_stride(
+  "pooling_max_same_padding_multi_stride",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:3:5",
+    I("pooling_1") + pooling_base +
+            "pooling=max | pool_size = 3,3 | padding =1,1 | stride=2,2" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_max_valid_padding(
+  "pooling_max_valid_padding",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:5:3",
+    I("pooling_1") + pooling_base +
+            "pooling=max | pool_size = 3,3 | padding =0,0" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_avg_same_padding(
+  "pooling_avg_same_padding",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:5:3",
+    I("pooling_1") + pooling_base +
+            "pooling=average | pool_size = 3,3 | padding =1,1" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_avg_valid_padding(
+  "pooling_avg_valid_padding",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:5:3",
+    I("pooling_1") + pooling_base +
+            "pooling=average | pool_size = 3,3 | padding =0,0" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_avg_same_padding_multi_stride(
+  "pooling_avg_same_padding_multi_stride",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:3:5",
+    I("pooling_1") + pooling_base +
+            "pooling=average | pool_size = 3,3 | padding =1,1 | stride=2,2" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_global_avg(
+  "pooling_global_avg",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:5:3",
+    I("pooling_1") + pooling_base +
+            "pooling=global_average" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
+INI pooling_global_max(
+  "pooling_global_max",
+  {
+    nn_base + "learning_rate=0.1 | optimizer=sgd | loss=cross | batch_size=3",
+        I("input") + input_base + "input_shape=2:5:3",
+    I("pooling_1") + pooling_base +
+            "pooling=global_max" + "input_layers=input",
+    I("act_1") + sigmoid_base + "input_layers=pooling_1",
+    I("flatten", "type=flatten")+ "input_layers=act_1",
+    I("outputlayer") + fc_base + "unit = 10" + "input_layers=flatten",
+    I("act_2") + softmax_base + "input_layers=outputlayer"
+  }
+);
+
 INI mnist_conv_cross_one_input = INI("mnist_conv_cross_one_input") + mnist_conv_cross + "model/batch_size=1";
 
 INSTANTIATE_TEST_CASE_P(
@@ -786,7 +898,16 @@ INSTANTIATE_TEST_CASE_P(
     mkModelTc(conv_uneven_strides, "3:1:1:10", 10),
     mkModelTc(conv_same_padding_multi_stride, "3:1:1:10", 10),
     mkModelTc(conv_no_loss_validate, "3:1:1:10", 1),
-    mkModelTc(conv_none_loss_validate, "3:1:1:10", 1)
+    mkModelTc(conv_none_loss_validate, "3:1:1:10", 1),
+    /**< single pooling layer test */
+    mkModelTc(pooling_max_same_padding, "3:1:1:10", 10),
+    mkModelTc(pooling_max_same_padding_multi_stride, "3:1:1:10", 10),
+    mkModelTc(pooling_max_valid_padding, "3:1:1:10", 10),
+    mkModelTc(pooling_avg_same_padding, "3:1:1:10", 10),
+    mkModelTc(pooling_avg_same_padding_multi_stride, "3:1:1:10", 10),
+    mkModelTc(pooling_avg_valid_padding, "3:1:1:10", 10),
+    mkModelTc(pooling_global_avg, "3:1:1:10", 10),
+    mkModelTc(pooling_global_max, "3:1:1:10", 10)
 // / #if gtest_version <= 1.7.0
 ));
 /// #else gtest_version > 1.8.0