[test] reorder tizen capi unittest
authorhyeonseok lee <hs89.lee@samsung.com>
Thu, 13 Apr 2023 07:34:57 +0000 (16:34 +0900)
committerjijoong.moon <jijoong.moon@samsung.com>
Tue, 18 Apr 2023 04:54:41 +0000 (13:54 +0900)
 - Reorder unittest for sequential order

Signed-off-by: hyeonseok lee <hs89.lee@samsung.com>
test/tizen_capi/unittest_tizen_capi.cpp
test/tizen_capi/unittest_tizen_capi_optimizer.cpp

index eed6674..24c2aeb 100644 (file)
@@ -151,54 +151,9 @@ TEST(nntrainer_capi_nnmodel, compile_01_p) {
 /**
  * @brief Neural Network Model Compile Test
  */
-TEST(nntrainer_capi_nnmodel, compile_with_single_param_01_p) {
-  ml_train_model_h handle = NULL;
-  int status = ML_ERROR_NONE;
-
-  ScopedIni s("capi_test_compile_with_single_param_01_p",
-              {model_base, optimizer, dataset, inputlayer, outputlayer});
-
-  status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-  status =
-    ml_train_model_compile_with_single_param(handle, "loss=cross|epochs=2");
-  EXPECT_EQ(status, ML_ERROR_NONE);
-  status = ml_train_model_destroy(handle);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-}
-
-/**
- * @brief Neural Network Model Compile Test
- */
-TEST(nntrainer_capi_nnmodel, construct_conf_01_n) {
-  ml_train_model_h handle = NULL;
-  int status = ML_ERROR_NONE;
-  std::string config_file = "/test/cannot_find.ini";
-  status = ml_train_model_construct_with_conf(config_file.c_str(), &handle);
-  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
-}
-
-/**
- * @brief Neural Network Model Compile Test
- */
-TEST(nntrainer_capi_nnmodel, construct_conf_02_n) {
-  ml_train_model_h handle = NULL;
-  int status = ML_ERROR_NONE;
-
-  ScopedIni s("capi_test_compile_03_n",
-              {model_base, optimizer, dataset, inputlayer + "Input_Shape=1:1:0",
-               outputlayer});
-
-  status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
-  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
-}
-
-/**
- * @brief Neural Network Model Compile Test
- */
 TEST(nntrainer_capi_nnmodel, compile_02_n) {
   int status = ML_ERROR_NONE;
-  std::string config_file = "./test_compile_03_n.ini";
+  std::string config_file = "./test_compile_02_n.ini";
   status = ml_train_model_compile(NULL);
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
 }
@@ -206,7 +161,7 @@ TEST(nntrainer_capi_nnmodel, compile_02_n) {
 /**
  * @brief Neural Network Model Optimizer Test
  */
-TEST(nntrainer_capi_nnmodel, compile_05_p) {
+TEST(nntrainer_capi_nnmodel, compile_03_p) {
   int status = ML_ERROR_NONE;
 
   ml_train_model_h model;
@@ -271,7 +226,7 @@ TEST(nntrainer_capi_nnmodel, compile_05_p) {
 /**
  * @brief Neural Network Model Optimizer Test
  */
-TEST(nntrainer_capi_nnmodel, compile_06_n) {
+TEST(nntrainer_capi_nnmodel, compile_04_n) {
   int status = ML_ERROR_NONE;
 
   ml_train_model_h model;
@@ -349,18 +304,44 @@ TEST(nntrainer_capi_nnmodel, compile_06_n) {
 /**
  * @brief Neural Network Model Compile Test
  */
-TEST(nntrainer_capi_nnmodel, compile_with_single_param_01_n) {
+TEST(nntrainer_capi_nnmodel, construct_conf_01_n) {
+  ml_train_model_h handle = NULL;
+  int status = ML_ERROR_NONE;
+  std::string config_file = "/test/cannot_find.ini";
+  status = ml_train_model_construct_with_conf(config_file.c_str(), &handle);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Model Compile Test
+ */
+TEST(nntrainer_capi_nnmodel, construct_conf_02_n) {
+  ml_train_model_h handle = NULL;
+  int status = ML_ERROR_NONE;
+
+  ScopedIni s("capi_test_construct_conf_02_n",
+              {model_base, optimizer, dataset, inputlayer + "Input_Shape=1:1:0",
+               outputlayer});
+
+  status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Model Compile Test
+ */
+TEST(nntrainer_capi_nnmodel, compile_with_single_param_01_p) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
-  ScopedIni s("capi_test_compile_with_single_param_01_n",
+  ScopedIni s("capi_test_compile_with_single_param_01_p",
               {model_base, optimizer, dataset, inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
   status =
-    ml_train_model_compile_with_single_param(handle, "loss=cross epochs=2");
-  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+    ml_train_model_compile_with_single_param(handle, "loss=cross|epochs=2");
+  EXPECT_EQ(status, ML_ERROR_NONE);
   status = ml_train_model_destroy(handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
 }
@@ -378,7 +359,7 @@ TEST(nntrainer_capi_nnmodel, compile_with_single_param_02_n) {
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
   status =
-    ml_train_model_compile_with_single_param(handle, "loss=cross,epochs=2");
+    ml_train_model_compile_with_single_param(handle, "loss=cross epochs=2");
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
   status = ml_train_model_destroy(handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
@@ -391,54 +372,46 @@ TEST(nntrainer_capi_nnmodel, compile_with_single_param_03_n) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
-  ScopedIni s("capi_test_compile_with_single_param_02_n",
+  ScopedIni s("capi_test_compile_with_single_param_03_n",
               {model_base, optimizer, dataset, inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
   status =
-    ml_train_model_compile_with_single_param(handle, "loss=cross!epochs=2");
+    ml_train_model_compile_with_single_param(handle, "loss=cross,epochs=2");
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
   status = ml_train_model_destroy(handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
 }
+
 /**
- * @brief Neural Network Model Train Test
+ * @brief Neural Network Model Compile Test
  */
-TEST(nntrainer_capi_nnmodel, train_01_p) {
+TEST(nntrainer_capi_nnmodel, compile_with_single_param_04_n) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
-  ScopedIni s("capi_test_train_01_p",
-              {model_base + "batch_size = 16", optimizer,
-               dataset + "-BufferSize", inputlayer, outputlayer});
+  ScopedIni s("capi_test_compile_with_single_param_04_n",
+              {model_base, optimizer, dataset, inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
-
-  status = ml_train_model_compile(handle, NULL);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-
-  status = ml_train_model_run(handle, NULL);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-
-  /** Compare training statistics */
-  nntrainer_capi_model_comp_metrics(handle, 3.911289, 2.933979, 10.4167);
-
+  status =
+    ml_train_model_compile_with_single_param(handle, "loss=cross!epochs=2");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
   status = ml_train_model_destroy(handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
 }
-
 /**
  * @brief Neural Network Model Train Test
  */
-TEST(nntrainer_capi_nnmodel, train_with_single_param_01_p) {
+TEST(nntrainer_capi_nnmodel, train_01_p) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
-  ScopedIni s(
-    "capi_test_train_with_single_param_01_p",
-    {model_base, optimizer, dataset + "-BufferSize", inputlayer, outputlayer});
+  ScopedIni s("capi_test_train_01_p",
+              {model_base + "batch_size = 16", optimizer,
+               dataset + "-BufferSize", inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
@@ -446,12 +419,11 @@ TEST(nntrainer_capi_nnmodel, train_with_single_param_01_p) {
   status = ml_train_model_compile(handle, NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
-  status =
-    ml_train_model_run_with_single_param(handle, "epochs=2|batch_size=16");
+  status = ml_train_model_run(handle, NULL);
   EXPECT_EQ(status, ML_ERROR_NONE);
 
   /** Compare training statistics */
-  nntrainer_capi_model_comp_metrics(handle, 3.67021, 3.26736, 10.4167);
+  nntrainer_capi_model_comp_metrics(handle, 3.911289, 2.933979, 10.4167);
 
   status = ml_train_model_destroy(handle);
   EXPECT_EQ(status, ML_ERROR_NONE);
@@ -472,7 +444,7 @@ TEST(nntrainer_capi_nnmodel, train_02_n) {
 TEST(nntrainer_capi_nnmodel, train_03_n) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
-  ScopedIni s("capi_test_train_01_p",
+  ScopedIni s("capi_test_train_03_n",
               {model_base + "batch_size = 16", optimizer,
                dataset + "-BufferSize", inputlayer, outputlayer});
 
@@ -489,12 +461,40 @@ TEST(nntrainer_capi_nnmodel, train_03_n) {
 /**
  * @brief Neural Network Model Train Test
  */
-TEST(nntrainer_capi_nnmodel, train_with_single_param_01_n) {
+TEST(nntrainer_capi_nnmodel, train_with_single_param_01_p) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
   ScopedIni s(
-    "capi_test_train_with_single_param_01_n",
+    "capi_test_train_with_single_param_01_p",
+    {model_base, optimizer, dataset + "-BufferSize", inputlayer, outputlayer});
+
+  status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  status = ml_train_model_compile(handle, NULL);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  status =
+    ml_train_model_run_with_single_param(handle, "epochs=2|batch_size=16");
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  /** Compare training statistics */
+  nntrainer_capi_model_comp_metrics(handle, 3.77080, 3.18020, 10.4167);
+
+  status = ml_train_model_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Model Train Test
+ */
+TEST(nntrainer_capi_nnmodel, train_with_single_param_02_n) {
+  ml_train_model_h handle = NULL;
+  int status = ML_ERROR_NONE;
+
+  ScopedIni s(
+    "capi_test_train_with_single_param_02_n",
     {model_base, optimizer, dataset + "-BufferSize", inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
@@ -514,12 +514,12 @@ TEST(nntrainer_capi_nnmodel, train_with_single_param_01_n) {
 /**
  * @brief Neural Network Model Train Test
  */
-TEST(nntrainer_capi_nnmodel, train_with_single_param_02_n) {
+TEST(nntrainer_capi_nnmodel, train_with_single_param_03_n) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
   ScopedIni s(
-    "capi_test_train_with_single_param_02_n",
+    "capi_test_train_with_single_param_03_n",
     {model_base, optimizer, dataset + "-BufferSize", inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
@@ -539,12 +539,12 @@ TEST(nntrainer_capi_nnmodel, train_with_single_param_02_n) {
 /**
  * @brief Neural Network Model Train Test
  */
-TEST(nntrainer_capi_nnmodel, train_with_single_param_03_n) {
+TEST(nntrainer_capi_nnmodel, train_with_single_param_04_n) {
   ml_train_model_h handle = NULL;
   int status = ML_ERROR_NONE;
 
   ScopedIni s(
-    "capi_test_train_with_single_param_02_n",
+    "capi_test_train_with_single_param_04_n",
     {model_base, optimizer, dataset + "-BufferSize", inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &handle);
@@ -706,13 +706,13 @@ TEST(nntrainer_capi_nnmodel, addLayer_05_n) {
 /**
  * @brief Neural Network Model Add layer test
  */
-TEST(nntrainer_capi_nnmodel, addLayer_07_n) {
+TEST(nntrainer_capi_nnmodel, addLayer_06_n) {
   int status = ML_ERROR_NONE;
 
   ml_train_model_h model = NULL;
   ml_train_layer_h layer = NULL;
 
-  ScopedIni s("capi_test_addLayer_07_n",
+  ScopedIni s("capi_test_addLayer_06_n",
               {model_base, optimizer, dataset, inputlayer, outputlayer});
 
   status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &model);
@@ -867,6 +867,46 @@ TEST(nntrainer_capi_nnmodel, getLayer_04_n) {
 }
 
 /**
+ * @brief Neural Network Model Get Layer Test
+ */
+TEST(nntrainer_capi_nnmodel, getLayer_05_n) {
+  int status = ML_ERROR_NONE;
+
+  ml_train_model_h model;
+  ml_train_layer_h get_layer;
+
+  std::string default_name = "inputlayer", modified_name = "renamed_inputlayer";
+  char *default_summary, *modified_summary = nullptr;
+
+  ScopedIni s("getLayer_05_p", {model_base, inputlayer});
+
+  status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &model);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  status =
+    ml_train_model_get_summary(model, ML_TRAIN_SUMMARY_MODEL, &default_summary);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  std::string default_summary_str(default_summary);
+  EXPECT_NE(default_summary_str.find(default_name), std::string::npos);
+  free(default_summary);
+
+  status = ml_train_model_get_layer(model, default_name.c_str(), &get_layer);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  status = ml_train_layer_set_property(get_layer,
+                                       ("name=" + modified_name).c_str(), NULL);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+
+  ///@todo need to fix bug (Unable to get renamed layer)
+  status = ml_train_model_get_layer(model, modified_name.c_str(), &get_layer);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_model_destroy(model);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
  * @brief Neural Network Model Get Weight  Test
  */
 TEST(nntrainer_capi_nnmodel, getWeight_01) {
@@ -923,46 +963,6 @@ TEST(nntrainer_capi_nnmodel, getWeight_01) {
 }
 
 /**
- * @brief Neural Network Model Get Layer Test
- */
-TEST(nntrainer_capi_nnmodel, getLayer_05_n) {
-  int status = ML_ERROR_NONE;
-
-  ml_train_model_h model;
-  ml_train_layer_h get_layer;
-
-  std::string default_name = "inputlayer", modified_name = "renamed_inputlayer";
-  char *default_summary, *modified_summary = nullptr;
-
-  ScopedIni s("getLayer_02_p", {model_base, inputlayer});
-
-  status = ml_train_model_construct_with_conf(s.getIniName().c_str(), &model);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-
-  status =
-    ml_train_model_get_summary(model, ML_TRAIN_SUMMARY_MODEL, &default_summary);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-
-  std::string default_summary_str(default_summary);
-  EXPECT_NE(default_summary_str.find(default_name), std::string::npos);
-  free(default_summary);
-
-  status = ml_train_model_get_layer(model, default_name.c_str(), &get_layer);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-
-  status = ml_train_layer_set_property(get_layer,
-                                       ("name=" + modified_name).c_str(), NULL);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-
-  ///@todo need to fix bug (Unable to get renamed layer)
-  status = ml_train_model_get_layer(model, modified_name.c_str(), &get_layer);
-  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
-
-  status = ml_train_model_destroy(model);
-  EXPECT_EQ(status, ML_ERROR_NONE);
-}
-
-/**
  * @brief Neural Network Model Optimizer Test
  */
 TEST(nntrainer_capi_nnmodel, create_optimizer_01_p) {
index 6ac32d5..f52da48 100644 (file)
@@ -72,7 +72,7 @@ TEST(nntrainer_capi_nnopt, create_delete_04_n) {
 /**
  * @brief Neural Network Optimizer set Property Test (positive test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_01_p) {
+TEST(nntrainer_capi_nnopt, setProperty_01_p) {
   ml_train_optimizer_h handle;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);
@@ -87,7 +87,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_01_p) {
 /**
  * @brief Neural Network Optimizer Set Property Test (positive test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_02_p) {
+TEST(nntrainer_capi_nnopt, setProperty_02_p) {
   ml_train_optimizer_h handle;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);
@@ -103,7 +103,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_02_p) {
 /**
  * @brief Neural Network Optimizer Set Property Test (negative test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_03_n) {
+TEST(nntrainer_capi_nnopt, setProperty_03_n) {
   ml_train_optimizer_h handle;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);
@@ -118,7 +118,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_03_n) {
 /**
  * @brief Neural Network Optimizer Set Property Test (negative test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_04_n) {
+TEST(nntrainer_capi_nnopt, setProperty_04_n) {
   ml_train_optimizer_h handle = NULL;
   int status;
 
@@ -131,7 +131,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_04_n) {
 /**
  * @brief Neural Network Optimizer Set Property Test (negative test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_05_n) {
+TEST(nntrainer_capi_nnopt, setProperty_05_n) {
   ml_train_optimizer_h handle = NULL;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);
@@ -143,7 +143,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_05_n) {
 /**
  * @brief Neural Network Optimizer Set Property Test (positive test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_with_single_param_06_p) {
+TEST(nntrainer_capi_nnopt, setProperty_with_single_param_01_p) {
   ml_train_optimizer_h handle;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);
@@ -158,7 +158,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_with_single_param_06_p) {
 /**
  * @brief Neural Network Optimizer Set Property Test (negative test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_with_single_param_07_n) {
+TEST(nntrainer_capi_nnopt, setProperty_with_single_param_02_n) {
   ml_train_optimizer_h handle;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);
@@ -173,7 +173,7 @@ TEST(nntrainer_capi_nnopt, setOptimizer_with_single_param_07_n) {
 /**
  * @brief Neural Network Optimizer Set Property Test (negative test)
  */
-TEST(nntrainer_capi_nnopt, setOptimizer_with_single_param_08_n) {
+TEST(nntrainer_capi_nnopt, setProperty_with_single_param_03_n) {
   ml_train_optimizer_h handle;
   int status;
   status = ml_train_optimizer_create(&handle, ML_TRAIN_OPTIMIZER_TYPE_ADAM);