[test] Enable c/cc disabled tests
author: Parichay Kapoor <pk.kapoor@samsung.com>
Fri, 30 Jul 2021 07:11:08 +0000 (16:11 +0900)
committer: Jijoong Moon <jijoong.moon@samsung.com>
Tue, 3 Aug 2021 02:07:46 +0000 (11:07 +0900)
This patch enables c/cc disabled tests.
Corresponding required updates in the tests are also added.

Signed-off-by: Parichay Kapoor <pk.kapoor@samsung.com>
nntrainer/app_context.h
test/ccapi/unittest_ccapi.cpp
test/meson.build
test/tizen_capi/meson.build
test/tizen_capi/unittest_tizen_capi.cpp
test/unittest/unittest_nntrainer_appcontext.cpp

index 9c5c758..990a992 100644 (file)
@@ -289,7 +289,7 @@ public:
    */
   template <typename T>
   static PtrType<T> unknownFactory(const PropsType &props) {
-    throw std::runtime_error("cannot create unknown object");
+    throw std::invalid_argument("cannot create unknown object");
   }
 
 private:
index 3af3b64..199d594 100644 (file)
@@ -83,7 +83,7 @@ TEST(ccapi_layer, construct_02_p) {
   EXPECT_EQ(layer->getType(), "concat");
 
   EXPECT_NO_THROW(layer = ml::train::layer::MultiOut());
-  EXPECT_EQ(layer->getType(), "output");
+  EXPECT_EQ(layer->getType(), "multiout");
 
 #ifdef ENABLE_NNSTREAMER_BACKBONE
   EXPECT_NO_THROW(layer = ml::train::layer::BackboneNNStreamer());
@@ -115,10 +115,13 @@ TEST(ccapi_layer, construct_03_p) {
   std::shared_ptr<ml::train::Layer> layer;
 
   EXPECT_NO_THROW(layer = ml::train::loss::MSE());
-  EXPECT_EQ(layer->getType(), "loss");
+  EXPECT_EQ(layer->getType(), "mse");
 
-  EXPECT_NO_THROW(layer = ml::train::loss::CrossEntropy());
-  EXPECT_EQ(layer->getType(), "loss");
+  EXPECT_NO_THROW(layer = ml::train::loss::CrossEntropySigmoid());
+  EXPECT_EQ(layer->getType(), "cross_sigmoid");
+
+  EXPECT_NO_THROW(layer = ml::train::loss::CrossEntropySoftmax());
+  EXPECT_EQ(layer->getType(), "cross_softmax");
 }
 
 /**
@@ -175,7 +178,6 @@ static nntrainer::IniSection dataset("Dataset", "BufferSize=100"
 static nntrainer::IniSection inputlayer("inputlayer",
                                         "Type = input"
                                         "| Input_Shape = 1:1:62720"
-                                        "| bias_initializer = zeros"
                                         "| Normalization = true"
                                         "| Activation = sigmoid");
 
@@ -219,9 +221,8 @@ TEST(nntrainer_ccapi, train_dataset_with_file_01_p) {
   EXPECT_NO_THROW(model =
                     ml::train::createModel(ml::train::ModelType::NEURAL_NET));
 
-  EXPECT_NO_THROW(layer = ml::train::layer::Input({"input_shape=1:1:62720",
-                                                   "normalization=true",
-                                                   "bias_initializer=zeros"}));
+  EXPECT_NO_THROW(layer = ml::train::layer::Input(
+                    {"input_shape=1:1:62720", "normalization=true"}));
   EXPECT_NO_THROW(model->addLayer(layer));
 
   EXPECT_NO_THROW(
@@ -276,9 +277,8 @@ TEST(nntrainer_ccapi, train_dataset_with_generator_01_p) {
   EXPECT_NO_THROW(model =
                     ml::train::createModel(ml::train::ModelType::NEURAL_NET));
 
-  EXPECT_NO_THROW(layer = ml::train::layer::Input({"input_shape=1:1:62720",
-                                                   "normalization=true",
-                                                   "bias_initializer=zeros"}));
+  EXPECT_NO_THROW(layer = ml::train::layer::Input(
+                    {"input_shape=1:1:62720", "normalization=true"}));
   EXPECT_NO_THROW(model->addLayer(layer));
 
   EXPECT_NO_THROW(
@@ -331,9 +331,8 @@ TEST(nntrainer_ccapi, train_batch_size_update_after) {
   EXPECT_NO_THROW(model =
                     ml::train::createModel(ml::train::ModelType::NEURAL_NET));
 
-  EXPECT_NO_THROW(layer = ml::train::layer::Input({"input_shape=1:1:62720",
-                                                   "normalization=true",
-                                                   "bias_initializer=zeros"}));
+  EXPECT_NO_THROW(layer = ml::train::layer::Input(
+                    {"input_shape=1:1:62720", "normalization=true"}));
   EXPECT_NO_THROW(model->addLayer(layer));
 
   EXPECT_NO_THROW(
index 2a4836d..4b81d88 100644 (file)
@@ -35,7 +35,7 @@ if get_option('enable-capi')
 endif
 
 if get_option('enable-ccapi')
-  subdir('ccapi')
+  subdir('ccapi')
 endif
 
 if get_option('enable-nnstreamer-tensor-filter')
index 6754c73..707243a 100644 (file)
@@ -3,8 +3,7 @@ unittest_tizen_deps = [
   nntrainer_test_deps,
 ]
 
-# unittest_name_list = ['', '_layer', '_optimizer', '_dataset']
-unittest_name_list = ['_optimizer', '_dataset']
+unittest_name_list = ['', '_layer', '_optimizer', '_dataset']
 unittest_prefix = 'unittest_tizen_capi'
 
 foreach test_name : unittest_name_list
index 594f61d..f631d29 100644 (file)
@@ -51,7 +51,6 @@ static nntrainer::IniSection dataset("Dataset", "BufferSize=100"
 static nntrainer::IniSection inputlayer("inputlayer",
                                         "Type = input"
                                         "| Input_Shape = 1:1:62720"
-                                        "| bias_initializer = zeros"
                                         "| Normalization = true"
                                         "| Activation = sigmoid");
 
@@ -201,8 +200,7 @@ TEST(nntrainer_capi_nnmodel, compile_05_p) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
-                                       "normalization=true",
-                                       "bias_initializer=zeros", NULL);
+                                       "normalization=true", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_model_add_layer(model, layers[0]);
@@ -266,8 +264,7 @@ TEST(nntrainer_capi_nnmodel, compile_06_n) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
-                                       "normalization=true",
-                                       "bias_initializer=zeros", NULL);
+                                       "normalization=true", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   /** Find layer before adding */
@@ -483,8 +480,7 @@ TEST(nntrainer_capi_nnmodel, addLayer_04_p) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
-                                       "normalization=true",
-                                       "bias_initializer=zeros", NULL);
+                                       "normalization=true", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_model_add_layer(model, layers[0]);
@@ -591,8 +587,7 @@ TEST(nntrainer_capi_nnmodel, create_optimizer_02_p) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
-                                       "normalization=true",
-                                       "bias_initializer=zeros", NULL);
+                                       "normalization=true", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_model_add_layer(model, layers[0]);
@@ -641,8 +636,7 @@ TEST(nntrainer_capi_nnmodel, create_optimizer_03_p) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
-                                       "normalization=true",
-                                       "bias_initializer=zeros", NULL);
+                                       "normalization=true", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_model_add_layer(model, layers[0]);
@@ -694,9 +688,9 @@ TEST(nntrainer_capi_nnmodel, train_with_file_01_p) {
   status = ml_train_layer_create(&layers[0], ML_TRAIN_LAYER_TYPE_INPUT);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
-  status = ml_train_layer_set_property(
-    layers[0], "input_shape=1:1:62720", "normalization=true",
-    "bias_initializer=zeros", "name=inputlayer", NULL);
+  status =
+    ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
+                                "normalization=true", "name=inputlayer", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_model_add_layer(model, layers[0]);
@@ -768,9 +762,9 @@ TEST(nntrainer_capi_nnmodel, train_with_generator_01_p) {
   status = ml_train_layer_create(&layers[0], ML_TRAIN_LAYER_TYPE_INPUT);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
-  status = ml_train_layer_set_property(
-    layers[0], "input_shape=1:1:62720", "normalization=true",
-    "bias_initializer=zeros", "name=inputlayer", NULL);
+  status =
+    ml_train_layer_set_property(layers[0], "input_shape=1:1:62720",
+                                "normalization=true", "name=inputlayer", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_model_add_layer(model, layers[0]);
@@ -872,9 +866,9 @@ TEST(nntrainer_capi_nnmodel, train_with_generator_02_p) {
   status = ml_train_layer_create(&layers[0], ML_TRAIN_LAYER_TYPE_INPUT);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
-  status = ml_train_layer_set_property(
-    layers[0], "input_shape=1:1:100", "normalization=true",
-    "bias_initializer=true", "name=inputlayer", NULL);
+  status =
+    ml_train_layer_set_property(layers[0], "input_shape=1:1:100",
+                                "normalization=true", "name=inputlayer", NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   status = ml_train_layer_create(&layers[1], ML_TRAIN_LAYER_TYPE_FC);
index 208aa62..2e2bc9f 100644 (file)
@@ -251,7 +251,7 @@ TEST(nntrainerAppContextObjs, callingUnknownFactoryOptimizerWithKey_n) {
 
   EXPECT_EQ(num, 999);
   EXPECT_THROW(ac.createObject<ml::train::Optimizer>("unknown"),
-               std::runtime_error);
+               std::invalid_argument);
 }
 
 TEST(nntrainerAppContextObjs, callingUnknownFactoryOptimizerWithIntKey_n) {
@@ -262,7 +262,8 @@ TEST(nntrainerAppContextObjs, callingUnknownFactoryOptimizerWithIntKey_n) {
     999);
 
   EXPECT_EQ(num, 999);
-  EXPECT_THROW(ac.createObject<ml::train::Optimizer>(num), std::runtime_error);
+  EXPECT_THROW(ac.createObject<ml::train::Optimizer>(num),
+               std::invalid_argument);
 }
 
 /**