[tests] Add more layer common unittests
author     Parichay Kapoor <pk.kapoor@samsung.com>
           Wed, 7 Jul 2021 13:03:51 +0000 (22:03 +0900)
committer  Jijoong Moon <jijoong.moon@samsung.com>
           Thu, 22 Jul 2021 11:47:24 +0000 (20:47 +0900)
Add more layer common unittests without involving runContext for the
layers.

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
test/unittest/layers/layers_dependent_common_tests.cpp
test/unittest/layers/layers_standalone_common_tests.cpp
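
For context, the LayerSemantics cases added in this patch are GTest value-parameterized tests, so a layer opts into all of them with a single INSTANTIATE_TEST_CASE_P call in its own test file. Below is a minimal sketch of such a registration; the header names, the nntrainer::createLayer<> helper and the exact LayerSemanticsParamType field order are assumptions inferred from the members exercised in the tests (expected_type, valid_properties, must_fail), not a verbatim copy of the test suite.

  // Hypothetical per-layer registration against the common semantics tests.
  // LayerSemanticsParamType field order is assumed here: layer factory,
  // expected type string, valid properties, must_fail flag.
  #include <gtest/gtest.h>

  #include <fc_layer.h>            /* assumed header for FullyConnectedLayer */
  #include <layers_common_tests.h> /* assumed header declaring LayerSemantics */

  auto semantic_fc = LayerSemanticsParamType(
    nntrainer::createLayer<nntrainer::FullyConnectedLayer>, /* layer factory */
    nntrainer::FullyConnectedLayer::type,                   /* expected_type */
    {"unit=1"},                                             /* valid_properties */
    false /* must_fail */);

  INSTANTIATE_TEST_CASE_P(FullyConnected, LayerSemantics,
                          ::testing::Values(semantic_fc));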

test/unittest/layers/layers_dependent_common_tests.cpp
index 2b98196..4651bda 100644
@@ -39,7 +39,7 @@ TEST_P(LayerSemantics, setPropertiesInvalid_n) {
   EXPECT_THROW(layer->setProperty({"unknown_props=2"}), std::invalid_argument);
 }
 
-TEST_P(LayerSemantics, finalizeOutputValidateLayerNode_p) {
+TEST_P(LayerSemantics, finalizeValidateLayerNode_p) {
   auto lnode = nntrainer::createLayerNode(expected_type);
   lnode->setProperty({"input_shape=1:1:1", "name=test"});
   // /** purpose is to set number of outputs to 1 */
@@ -63,3 +63,34 @@ TEST_P(LayerSemantics, finalizeOutputValidateLayerNode_p) {
     EXPECT_THROW(lnode->finalize(), nntrainer::exception::not_supported);
   }
 }
+
+TEST_P(LayerSemantics, getTypeValidateLayerNode_p) {
+  auto lnode = nntrainer::createLayerNode(expected_type);
+  std::string type;
+
+  EXPECT_NO_THROW(type = lnode->getType());
+  EXPECT_GT(type.size(), 0);
+}
+
+TEST_P(LayerSemantics, gettersValidateLayerNode_p) {
+  auto lnode = nntrainer::createLayerNode(expected_type);
+
+  EXPECT_NO_THROW(lnode->supportInPlace());
+  EXPECT_NO_THROW(lnode->requireLabel());
+  EXPECT_NO_THROW(lnode->supportBackwarding());
+}
+
+TEST_P(LayerSemantics, setBatchValidateLayerNode_p) {
+  auto lnode = nntrainer::createLayerNode(expected_type);
+  lnode->setProperty({"input_shape=1:1:1", "name=test"});
+  EXPECT_NO_THROW(lnode->setProperty(valid_properties));
+
+  if (!must_fail) {
+    EXPECT_NO_THROW(lnode->finalize());
+    auto &init_context = lnode->getInitContext();
+    EXPECT_NO_THROW(
+      lnode->setBatch(init_context.getInputDimensions()[0].batch() + 10));
+  } else {
+    EXPECT_THROW(lnode->finalize(), nntrainer::exception::not_supported);
+  }
+}

test/unittest/layers/layers_standalone_common_tests.cpp
index eeb0caf..e16ea28 100644
@@ -41,7 +41,7 @@ TEST_P(LayerSemantics, setPropertiesValidWithInvalid_n) {}
 
 TEST_P(LayerSemantics, setPropertiesValidInvalidOnly_n) {}
 
-TEST_P(LayerSemantics, finalizeOutputValidate_p) {
+TEST_P(LayerSemantics, finalizeValidate_p) {
   nntrainer::TensorDim in_dim({1, 1, 1, 1});
   nntrainer::InitLayerContext init_context =
     nntrainer::InitLayerContext({in_dim}, 1);
@@ -68,4 +68,36 @@ TEST_P(LayerSemantics, finalizeOutputValidate_p) {
   }
 }
 
+TEST_P(LayerSemantics, getTypeValidate_p) {
+  std::string type;
+
+  EXPECT_NO_THROW(type = layer->getType());
+  EXPECT_GT(type.size(), 0);
+}
+
+TEST_P(LayerSemantics, gettersValidate_p) {
+  EXPECT_NO_THROW(layer->supportInPlace());
+  EXPECT_NO_THROW(layer->requireLabel());
+  EXPECT_NO_THROW(layer->supportBackwarding());
+}
+
+TEST_P(LayerSemantics, setBatchValidate_p) {
+  nntrainer::TensorDim in_dim({1, 1, 1, 1});
+  nntrainer::InitLayerContext init_context =
+    nntrainer::InitLayerContext({in_dim}, 1);
+  init_context.validate();
+
+  // set necessary properties only
+  EXPECT_NO_THROW(layer->setProperty(valid_properties));
+
+  if (!must_fail) {
+    EXPECT_NO_THROW(layer->finalize(init_context));
+    EXPECT_NO_THROW(layer->setBatch(
+      init_context, init_context.getInputDimensions()[0].batch() + 10));
+  } else {
+    EXPECT_THROW(layer->finalize(init_context),
+                 nntrainer::exception::not_supported);
+  }
+}
+
 TEST_P(LayerGoldenTest, HelloWorld) { EXPECT_TRUE(true); }