[unittest] add negative unittest cases
authorSeungbaek Hong <sb92.hong@samsung.com>
Mon, 14 Oct 2024 11:37:56 +0000 (20:37 +0900)
committerJijoong Moon <jijoong.moon@samsung.com>
Fri, 18 Oct 2024 00:34:14 +0000 (09:34 +0900)
- added negative unittest cases
- deleted unittest cases that cause GTEST.MEANINGLESS_ASSERTION defect

**Self evaluation:**
1. Build test:   [X]Passed [ ]Failed [ ]Skipped
2. Run test:     [X]Passed [ ]Failed [ ]Skipped

Signed-off-by: Seungbaek Hong <sb92.hong@samsung.com>
14 files changed:
test/tizen_capi/unittest_tizen_capi_layer.cpp
test/unittest/compiler/unittest_realizer.cpp
test/unittest/datasets/unittest_iteration_queue.cpp
test/unittest/layers/layers_dependent_common_tests.cpp
test/unittest/layers/layers_golden_tests.cpp
test/unittest/memory/unittest_cache_pool.cpp
test/unittest/models/models_golden_test.cpp
test/unittest/unittest_base_properties.cpp
test/unittest/unittest_common_properties.cpp
test/unittest/unittest_nntrainer_internal.cpp
test/unittest/unittest_nntrainer_lr_scheduler.cpp
test/unittest/unittest_nntrainer_tensor.cpp
test/unittest/unittest_nntrainer_tensor_fp16.cpp
test/unittest/unittest_nntrainer_tensor_nhwc.cpp

index 7bd07c9b4e75246d210ef2aa9c5d506722288e6c..ce5e7dec42987ce29ab4d27544f44d7cf35df588 100644 (file)
@@ -26,7 +26,7 @@
 #include <nntrainer_test_util.h>
 
 /**
- * @brief Neural Network Layer Create / Delete Test (possitive test)
+ * @brief Neural Network Layer Create / Delete Test (positive test)
  */
 TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
   ml_train_layer_h handle;
@@ -38,7 +38,7 @@ TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
 }
 
 /**
- * @brief Neural Network Layer Create / Delete Test (possitive test)
+ * @brief Neural Network Layer Create / Delete Test (positive test)
  */
 TEST(nntrainer_capi_nnlayer, create_delete_02_p) {
   ml_train_layer_h handle;
@@ -232,6 +232,58 @@ TEST(nntrainer_capi_nnlayer, setproperty_11_n) {
   EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
 }
 
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_12_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If property is set in an inappropriate way, then error.
+   */
+  status = ml_train_layer_set_property(handle, "relu", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_13_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If property is set in an inappropriate way, then error.
+   */
+  status = ml_train_layer_set_property(handle, "=relu", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_14_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If property is set in an inappropriate way, then error.
+   */
+  status = ml_train_layer_set_property(handle, "=0.01", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_15_n) {
+  ml_train_layer_h handle = nullptr;
+  int status;
+  /**
+   * If property is set in an inappropriate way, then error.
+   */
+  status = ml_train_layer_set_property(handle, "activation:relu", NULL);
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
 /**
  * @brief Neural Network Layer Set Property Test (positive test)
  */
@@ -280,6 +332,55 @@ TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_03_n) {
   EXPECT_EQ(status, ML_ERROR_NONE);
 }
 
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_04_n) {
+  ml_train_layer_h handle;
+  int status;
+  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = ml_train_layer_set_property_with_single_param(
+    handle, "input_shape=1:1:6270 / normalization=true / standardization=true");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_layer_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_05_n) {
+  ml_train_layer_h handle;
+  int status;
+  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = ml_train_layer_set_property_with_single_param(
+    handle,
+    "input_shape=1:1:6270 // normalization=true // standardization=true");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_layer_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_06_n) {
+  ml_train_layer_h handle;
+  int status;
+  status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+  status = ml_train_layer_set_property_with_single_param(
+    handle, "input_shape=1:1:6270 : normalization=true : standardization=true");
+  EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+  status = ml_train_layer_destroy(handle);
+  EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
 /*** since tizen 6.5 ***/
 
 /**
index 5759fa0612ae726f52fe8bba55da80e5c4b539f4..b4fee235d54b05ac9a7d59b860847ad0f3a7f684 100644 (file)
@@ -942,5 +942,5 @@ TEST(LossRealizer, loss_realizer_p) {
   };
   LossRealizer r;
   std::vector<std::unique_ptr<nntrainer::GraphRealizer>> realizers;
-  compileAndRealizeAndEqual(r, realizers, before, after);
+  EXPECT_NO_THROW(compileAndRealizeAndEqual(r, realizers, before, after));
 }
index 376687d701c3309a86ab2a0e322958dfe21153ff..808472f19be92d0c4eabd37d8ae45d1992e14cfa 100644 (file)
@@ -575,7 +575,15 @@ GTEST_PARAMETER_TEST(IterQueue, IterQueueScenarios,
                                        multi_slot_single_batch,
                                        single_slot_single_batch));
 
-TEST(IterQueue, constructEmptySlots_n) {
+TEST(IterQueue, constructEmptySlots_01_n) {
+  EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {}));
+}
+
+TEST(IterQueue, constructEmptySlots_02_n) {
+  EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {{1}}));
+}
+
+TEST(IterQueue, constructEmptySlots_03_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {{1}}, {{1}}));
 }
 
@@ -583,17 +591,21 @@ TEST(IterQueue, constructEmptyInput_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {}, {{1}}));
 }
 
-TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_01_n) {
   EXPECT_ANY_THROW(
     nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {2, 1, 1, 10}}, {}));
 }
 
-TEST(IterQueue, constructNotConsistentBatchSizeInLabel_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_02_n) {
+  EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1}, {2, 1}}, {{1, 0}}));
+}
+
+TEST(IterQueue, constructNotConsistentBatchSizeInLabel_01_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
                                              {{2, 1, 1, 10}}));
 }
 
-TEST(IterQueue, constructNotConsistentBatchSizeInLabel2_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeInLabel_02_n) {
   EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
                                              {{3, 1, 1, 10}, {2, 1, 1, 10}}));
 }
index f1c87b2426ec5e12dd417ef22b52e9c521d8507b..068fe4e232d6d076f9913f260f08c28d0520c97c 100644 (file)
@@ -39,12 +39,18 @@ TEST_P(LayerPropertySemantics, setPropertiesInvalid_n) {
   EXPECT_THROW(layer->setProperty({valid_properties}), std::invalid_argument);
 }
 
-TEST_P(LayerSemantics, setPropertiesInvalid_n) {
+TEST_P(LayerSemantics, setPropertiesInvalid_01_n) {
   auto lnode = nntrainer::createLayerNode(expected_type);
   /** must not crash */
   EXPECT_THROW(layer->setProperty({"unknown_props=2"}), std::invalid_argument);
 }
 
+TEST_P(LayerSemantics, setPropertiesInvalid_02_n) {
+  auto lnode = nntrainer::createLayerNode(expected_type);
+  /** must not crash */
+  EXPECT_THROW(layer->setProperty({"unknown_props:2"}), std::invalid_argument);
+}
+
 TEST_P(LayerSemantics, finalizeValidateLayerNode_p) {
   auto lnode = nntrainer::createLayerNode(expected_type);
   std::vector<std::string> props = {"name=test"};
index 152f9e09343b1481ef1fc8b5ec347071f7f2ca95..464318f33305aab93a365f3795c583d248493401 100644 (file)
@@ -426,6 +426,4 @@ TEST_P(LayerGoldenTest, run) {
 
   compareRunContext(rc, golden_file, skip_calc_grad, skip_calc_deriv,
                     dropout_compare_60_percent, skip_cos_sim);
-
-  EXPECT_TRUE(true); // stub test for tcm
 }
index 6f12e05e9603e2b01039e15e05d5c89bbed68ba4..051ed3e6e85efc8bfa4e9446b107f651284f72ea 100644 (file)
@@ -67,11 +67,6 @@ public:
   MockCachePool *pool;
 };
 
-/**
- * @brief creation and destruction
- */
-TEST_F(CachePoolTest, create_destroy) {}
-
 /**
  * @brief get cache memory
  */
index e755a902cc326fab30c5ad18d2a58fda96439bdb..58445b38f3fc1aa70dce98676f68c196d89d979c 100644 (file)
@@ -43,51 +43,6 @@ void nntrainerModelTest::validate(
   }
 }
 
-/**
- * @brief check given ini is failing/suceeding at unoptimized running
- */
-TEST_P(nntrainerModelTest, model_test) {
-  if (!shouldCompare()) {
-    std::cout << "[ SKIPPED  ] option not enabled \n";
-    return;
-  }
-  /** Check model with all optimizations off */
-  compare(false);
-
-  /// add stub test for tcm
-  EXPECT_TRUE(true);
-}
-
-/**
- * @brief check given ini is failing/suceeding at optimized running
- */
-TEST_P(nntrainerModelTest, model_test_optimized) {
-  if (!shouldCompare()) {
-    std::cout << "[ SKIPPED  ] option not enabled \n";
-    return;
-  }
-  /** Check model with all optimizations on */
-
-  compare(true);
-
-  /// add stub test for tcm
-  EXPECT_TRUE(true);
-}
-
-/**
- * @brief check given ini is failing/suceeding at validation
- */
-TEST_P(nntrainerModelTest, model_test_validate) {
-  if (!shouldValidate()) {
-    std::cout << "[ SKIPPED  ] option not enabled \n";
-    return;
-  }
-
-  validate(true);
-  /// add stub test for tcm
-  EXPECT_TRUE(true);
-}
-
 TEST_P(nntrainerModelTest, model_test_save_load_compare) {
   if (!shouldSaveLoadIniTest() || !shouldCompare()) {
     std::cout << "[ SKIPPED  ] option not enabled \n";
index 56640c474882e8a3a6f1f9c79bfacbab1e32844b..e25c97386a53076a137b43ea377b95b98ddb8bcc 100644 (file)
@@ -383,6 +383,21 @@ TEST(BasicProperty, setNotValid_03_n) {
   EXPECT_THROW(d.set({3, 3, 2, 4}), std::invalid_argument);
 }
 
+TEST(BasicProperty, setNotValid_04_n) {
+  DimensionOfBanana d;
+  EXPECT_THROW(d.set({1, 2, 3, 4, 5}), std::invalid_argument);
+}
+
+TEST(BasicProperty, setNotValid_05_n) {
+  DimensionOfBanana d;
+  EXPECT_THROW(d.set({0}), std::invalid_argument);
+}
+
+TEST(BasicProperty, setNotValid_06_n) {
+  DimensionOfBanana d;
+  EXPECT_THROW(d.set({0, 1}), std::invalid_argument);
+}
+
 TEST(BasicProperty, fromStringNotValid_01_n) {
   NumBanana b;
   EXPECT_THROW(nntrainer::from_string("not integer", b), std::invalid_argument);
index 7da478a2a54c368ff278d276f9823bfc8414e7b1..848bc18251a2dec8d13c6e6cfe094cba05f7bde5 100644 (file)
@@ -144,6 +144,27 @@ TEST(InputConnection, invalidFormat_n_06) {
                std::invalid_argument);
 }
 
+TEST(InputConnection, invalidFormat_n_07) {
+  using namespace nntrainer::props;
+  InputConnection actual;
+  EXPECT_THROW(nntrainer::from_string("name:layer0", actual),
+               std::invalid_argument);
+}
+
+TEST(InputConnection, invalidFormat_n_08) {
+  using namespace nntrainer::props;
+  InputConnection actual;
+  EXPECT_THROW(nntrainer::from_string("name(layer0)", actual),
+               std::invalid_argument);
+}
+
+TEST(InputConnection, invalidFormat_n_09) {
+  using namespace nntrainer::props;
+  InputConnection actual;
+  EXPECT_THROW(nntrainer::from_string("name==layer0", actual),
+               std::invalid_argument);
+}
+
 TEST(DropOutRate, dropout_01_n) {
   nntrainer::props::DropOutRate dropout;
   EXPECT_THROW(dropout.set(-0.5), std::invalid_argument);
index b5dd84ff90563cbc57807c6becbbcdc0e13b4740..fc44549cea9d7459b736a1356d75a4429b8c9e81 100644 (file)
@@ -53,7 +53,27 @@ TEST(nntrainer_Optimizer, create_02_n) {
 /**
  * @brief Optimizer create
  */
-TEST(nntrainer_Optimizer, setType_02_n) {
+TEST(nntrainer_Optimizer, create_03_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(op =
+                     ac.createObject<nntrainer::Optimizer>("adam", {"lr=0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_04_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(
+    op = ac.createObject<nntrainer::Optimizer>("adam", {"learning_rate:0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_05_n) {
   std::unique_ptr<nntrainer::Optimizer> op;
   auto &ac = nntrainer::AppContext::Global();
   EXPECT_NO_THROW(op = ac.createObject<nntrainer::Optimizer>("sgd", {}));
@@ -62,7 +82,27 @@ TEST(nntrainer_Optimizer, setType_02_n) {
 /**
  * @brief Optimizer create
  */
-TEST(nntrainer_Optimizer, setType_03_n) {
+TEST(nntrainer_Optimizer, create_06_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(op =
+                     ac.createObject<nntrainer::Optimizer>("sgd", {"lr=0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_07_n) {
+  std::unique_ptr<nntrainer::Optimizer> op;
+  auto &ac = nntrainer::AppContext::Global();
+  EXPECT_ANY_THROW(
+    op = ac.createObject<nntrainer::Optimizer>("sgd", {"learning_rate:0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_08_n) {
   std::unique_ptr<nntrainer::Optimizer> op;
   auto &ac = nntrainer::AppContext::Global();
   EXPECT_ANY_THROW(op =
@@ -72,7 +112,7 @@ TEST(nntrainer_Optimizer, setType_03_n) {
 /**
  * @brief Optimizer create
  */
-TEST(nntrainer_Optimizer, setType_04_n) {
+TEST(nntrainer_Optimizer, create_09_n) {
   std::unique_ptr<nntrainer::Optimizer> op;
   auto &ac = nntrainer::AppContext::Global();
   EXPECT_ANY_THROW(
index b990e18d8bf0c03e6ab711e0fa81a77350bb7862..97fef1c4a027c754c4138b3a0546fac405bbe061 100644 (file)
@@ -64,6 +64,48 @@ TEST(lr_constant, ctor_initializer_04_n) {
                std::invalid_argument);
 }
 
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_05_n) {
+  EXPECT_THROW(nntrainer::createLearningRateScheduler<
+                 nntrainer::ConstantLearningRateScheduler>({"lr=0.1"}),
+               std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_06_n) {
+  EXPECT_THROW(
+    nntrainer::createLearningRateScheduler<
+      nntrainer::ConstantLearningRateScheduler>({"learning_rate:0.1"}),
+    std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_07_n) {
+  EXPECT_THROW(
+    nntrainer::createLearningRateScheduler<
+      nntrainer::ConstantLearningRateScheduler>({"learning_rate(0.1)"}),
+    std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler
+ *
+ */
+TEST(lr_constant, ctor_initializer_08_n) {
+  EXPECT_THROW(nntrainer::createLearningRateScheduler<
+                 nntrainer::ConstantLearningRateScheduler>({"0.1"}),
+               std::invalid_argument);
+}
+
 /**
  * @brief test set and get learning rate
  *
@@ -98,6 +140,24 @@ TEST(lr_constant, prop_03_p) {
   EXPECT_FLOAT_EQ(lr->getLearningRate(10), 1.0f);
 }
 
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_constant, prop_04_n) {
+  auto lr = createLRS("constant");
+  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
+
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_constant, prop_05_n) {
+  auto lr = createLRS("constant");
+  EXPECT_THROW(lr->setProperty({"learning_rate(0.1)"}), std::invalid_argument);
+}
+
 /**
  * @brief test set and get learning rate
  *
@@ -171,6 +231,14 @@ TEST(lr_exponential, prop_02_n) {
   EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
 }
 
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_exponential, prop_03_n) {
+  auto lr = createLRS("exponential");
+  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
 /**
  * @brief test finalize
  *
@@ -212,6 +280,14 @@ TEST(lr_step, prop_01_n) {
   auto lr = createLRS("step");
   EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
 }
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_step, prop_02_n) {
+  auto lr = createLRS("step");
+  EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
 
 /**
  * @brief test finalize
index 0d6d47be3771a49f131841581d612233048ceed9..25461feb108ca8ec0fcb40e2094e08b4406fed2e 100644 (file)
@@ -545,6 +545,13 @@ TEST(nntrainer_Tensor, multiply_i_broadcast_not_broadcastable_02_n) {
   EXPECT_EQ(target.multiply_i(target2), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_Tensor, multiply_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.multiply_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, multiply_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -1191,6 +1198,13 @@ TEST(nntrainer_Tensor, divide_i_broadcast_not_broadcastable_02_n) {
   EXPECT_EQ(target.divide_i(target2), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_Tensor, divide_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.divide_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, add_i_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -1496,6 +1510,13 @@ TEST(nntrainer_Tensor, add_i_broadcast_not_broadcastable_02_n) {
   EXPECT_EQ(target.add_i(target2), ML_ERROR_INVALID_PARAMETER);
 }
 
+TEST(nntrainer_Tensor, add_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.add_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, add_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -1660,6 +1681,27 @@ TEST(nntrainer_Tensor, pow_01_p) {
   EXPECT_EQ(actual, expected);
 }
 
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_supported_01_n) {
+  nntrainer::Tensor target(3, 1, 3, 1);
+  nntrainer::Tensor target2(3, 1, 3, 3);
+
+  EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_broadcastable_02_n) {
+  nntrainer::Tensor target(3, 2, 4, 5);
+  nntrainer::Tensor target2(3, 2, 3, 1);
+
+  EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_broadcastable_03_n) {
+  nntrainer::Tensor target(1, 2, 1, 2);
+  nntrainer::Tensor target2(1, 2, 3, 1);
+
+  EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
 TEST(nntrainer_Tensor, subtract_i_01_p) {
   int status = ML_ERROR_NONE;
   int batch = 3;
@@ -3278,12 +3320,6 @@ TEST(nntrainer_Tensor, fill_p) {
     EXPECT_EQ(target, original);
   }
 
-  /// same dimension, buffer size is different (not tested)
-  {
-    /// there is no way to make non contiguous tensor publicily yet
-    EXPECT_TRUE(true);
-  }
-
   /// uninitialized with initialized flag is true
   {
     nntrainer::Tensor target;
index 619aa77f3adf9ef14996892d693913e9cd5adcb0..1cdd9467c02f22bc3fc7754f405f57d31d32d7c7 100644 (file)
@@ -4848,12 +4848,6 @@ TEST(nntrainer_Tensor, fill_p) {
     EXPECT_EQ(target, original);
   }
 
-  /// same dimension, buffer size is different (not tested)
-  {
-    /// there is no way to make non contiguous tensor publicily yet
-    EXPECT_TRUE(true);
-  }
-
   /// uninitialized with initialized flag is true
   {
     nntrainer::Tensor target;
index 6bb0cb8a12b51095f6e3dfd7c57f94da16428b8d..bda5efd4303ac0a85c5d53cca291e3ef394684e7 100644 (file)
@@ -4692,13 +4692,55 @@ TEST(nntrainer_Tensor, transpose_nhwc_p) {
   }
 }
 
-TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_n) {
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_01_n) {
   nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
   nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
 
   EXPECT_THROW(a.transpose("0:1:2", b), std::invalid_argument);
 }
 
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_02_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("0:1", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_03_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("1:2:3:4", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_01_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("1<->4", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_02_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("2,0,1,3", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_03_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("2-0-1-3", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_04_n) {
+  nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+  nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+  EXPECT_THROW(a.transpose("2/0/1/3", b), std::invalid_argument);
+}
+
 // /**
 //  * @brief dequantize tensor with different format
 //  */