#include <nntrainer_test_util.h>
/**
- * @brief Neural Network Layer Create / Delete Test (possitive test)
+ * @brief Neural Network Layer Create / Delete Test (positive test)
*/
TEST(nntrainer_capi_nnlayer, create_delete_01_p) {
ml_train_layer_h handle;
}
/**
- * @brief Neural Network Layer Create / Delete Test (possitive test)
+ * @brief Neural Network Layer Create / Delete Test (positive test)
*/
TEST(nntrainer_capi_nnlayer, create_delete_02_p) {
ml_train_layer_h handle;
EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
}
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_12_n) {
+ ml_train_layer_h handle = nullptr;
+ int status;
+ /**
+ * If the property is set in an inappropriate way, an error is expected.
+ */
+ status = ml_train_layer_set_property(handle, "relu", NULL);
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_13_n) {
+ ml_train_layer_h handle = nullptr;
+ int status;
+ /**
+ * If the property is set in an inappropriate way, an error is expected.
+ */
+ status = ml_train_layer_set_property(handle, "=relu", NULL);
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_14_n) {
+ ml_train_layer_h handle = nullptr;
+ int status;
+ /**
+ * If the property is set in an inappropriate way, an error is expected.
+ */
+ status = ml_train_layer_set_property(handle, "=0.01", NULL);
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
+/**
+ * @brief Neural Network Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_15_n) {
+ ml_train_layer_h handle = nullptr;
+ int status;
+ /**
+ * If the property is set in an inappropriate way, an error is expected.
+ */
+ status = ml_train_layer_set_property(handle, "activation:relu", NULL);
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+}
+
/**
* @brief Neural Network Layer Set Property Test (positive test)
*/
EXPECT_EQ(status, ML_ERROR_NONE);
}
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_04_n) {
+ ml_train_layer_h handle;
+ int status;
+ status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+ EXPECT_EQ(status, ML_ERROR_NONE);
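+ /** " / " is not a valid separator between properties */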
+ status = ml_train_layer_set_property_with_single_param(
+ handle, "input_shape=1:1:6270 / normalization=true / standardization=true");
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+ status = ml_train_layer_destroy(handle);
+ EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_05_n) {
+ ml_train_layer_h handle;
+ int status;
+ status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+ EXPECT_EQ(status, ML_ERROR_NONE);
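+ /** " // " is likewise not a valid separator */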
+ status = ml_train_layer_set_property_with_single_param(
+ handle,
+ "input_shape=1:1:6270 // normalization=true // standardization=true");
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+ status = ml_train_layer_destroy(handle);
+ EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
+/**
+ * @brief Neural Network Layer Set Property Test (negative test)
+ */
+TEST(nntrainer_capi_nnlayer, setproperty_with_single_param_06_n) {
+ ml_train_layer_h handle;
+ int status;
+ status = ml_train_layer_create(&handle, ML_TRAIN_LAYER_TYPE_INPUT);
+ EXPECT_EQ(status, ML_ERROR_NONE);
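+ /** " : " is likewise not a valid separator */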
+ status = ml_train_layer_set_property_with_single_param(
+ handle, "input_shape=1:1:6270 : normalization=true : standardization=true");
+ EXPECT_EQ(status, ML_ERROR_INVALID_PARAMETER);
+
+ status = ml_train_layer_destroy(handle);
+ EXPECT_EQ(status, ML_ERROR_NONE);
+}
+
/*** since tizen 6.5 ***/
/**
};
LossRealizer r;
std::vector<std::unique_ptr<nntrainer::GraphRealizer>> realizers;
- compileAndRealizeAndEqual(r, realizers, before, after);
+ EXPECT_NO_THROW(compileAndRealizeAndEqual(r, realizers, before, after));
}
multi_slot_single_batch,
single_slot_single_batch));
-TEST(IterQueue, constructEmptySlots_n) {
+TEST(IterQueue, constructEmptySlots_01_n) {
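+ /** zero slots with empty input and label dimensions must throw */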
+ EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {}));
+}
+
+TEST(IterQueue, constructEmptySlots_02_n) {
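+ /** zero slots with a label dimension but no input must throw */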
+ EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {}, {{1}}));
+}
+
+TEST(IterQueue, constructEmptySlots_03_n) {
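+ /** zero slots must throw even when input and label dimensions are given */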
EXPECT_ANY_THROW(nntrainer::IterationQueue(0, {{1}}, {{1}}));
}
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {}, {{1}}));
}
-TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_01_n) {
EXPECT_ANY_THROW(
nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {2, 1, 1, 10}}, {}));
}
-TEST(IterQueue, constructNotConsistentBatchSizeInLabel_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeBetweenInputs_02_n) {
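+ /** input batch sizes 3 and 2 are inconsistent */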
+ EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1}, {2, 1}}, {{1, 0}}));
+}
+
+TEST(IterQueue, constructNotConsistentBatchSizeInLabel_01_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
{{2, 1, 1, 10}}));
}
-TEST(IterQueue, constructNotConsistentBatchSizeInLabel2_n) {
+TEST(IterQueue, constructNotConsistentBatchSizeInLabel_02_n) {
EXPECT_ANY_THROW(nntrainer::IterationQueue(1, {{3, 1, 1, 10}, {3, 1, 1, 10}},
{{3, 1, 1, 10}, {2, 1, 1, 10}}));
}
EXPECT_THROW(layer->setProperty({valid_properties}), std::invalid_argument);
}
-TEST_P(LayerSemantics, setPropertiesInvalid_n) {
+TEST_P(LayerSemantics, setPropertiesInvalid_01_n) {
auto lnode = nntrainer::createLayerNode(expected_type);
/** must not crash */
EXPECT_THROW(layer->setProperty({"unknown_props=2"}), std::invalid_argument);
}
+TEST_P(LayerSemantics, setPropertiesInvalid_02_n) {
+ auto lnode = nntrainer::createLayerNode(expected_type);
+ /** must not crash */
+ EXPECT_THROW(layer->setProperty({"unknown_props:2"}), std::invalid_argument);
+}
+
TEST_P(LayerSemantics, finalizeValidateLayerNode_p) {
auto lnode = nntrainer::createLayerNode(expected_type);
std::vector<std::string> props = {"name=test"};
compareRunContext(rc, golden_file, skip_calc_grad, skip_calc_deriv,
dropout_compare_60_percent, skip_cos_sim);
-
- EXPECT_TRUE(true); // stub test for tcm
}
MockCachePool *pool;
};
-/**
- * @brief creation and destruction
- */
-TEST_F(CachePoolTest, create_destroy) {}
-
/**
* @brief get cache memory
*/
}
}
-/**
- * @brief check given ini is failing/suceeding at unoptimized running
- */
-TEST_P(nntrainerModelTest, model_test) {
- if (!shouldCompare()) {
- std::cout << "[ SKIPPED ] option not enabled \n";
- return;
- }
- /** Check model with all optimizations off */
- compare(false);
-
- /// add stub test for tcm
- EXPECT_TRUE(true);
-}
-
-/**
- * @brief check given ini is failing/suceeding at optimized running
- */
-TEST_P(nntrainerModelTest, model_test_optimized) {
- if (!shouldCompare()) {
- std::cout << "[ SKIPPED ] option not enabled \n";
- return;
- }
- /** Check model with all optimizations on */
-
- compare(true);
-
- /// add stub test for tcm
- EXPECT_TRUE(true);
-}
-
-/**
- * @brief check given ini is failing/suceeding at validation
- */
-TEST_P(nntrainerModelTest, model_test_validate) {
- if (!shouldValidate()) {
- std::cout << "[ SKIPPED ] option not enabled \n";
- return;
- }
-
- validate(true);
- /// add stub test for tcm
- EXPECT_TRUE(true);
-}
-
TEST_P(nntrainerModelTest, model_test_save_load_compare) {
if (!shouldSaveLoadIniTest() || !shouldCompare()) {
std::cout << "[ SKIPPED ] option not enabled \n";
EXPECT_THROW(d.set({3, 3, 2, 4}), std::invalid_argument);
}
+TEST(BasicProperty, setNotValid_04_n) {
+ DimensionOfBanana d;
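+ /** more than four dimensions must be rejected */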
+ EXPECT_THROW(d.set({1, 2, 3, 4, 5}), std::invalid_argument);
+}
+
+TEST(BasicProperty, setNotValid_05_n) {
+ DimensionOfBanana d;
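+ /** a zero dimension must be rejected */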
+ EXPECT_THROW(d.set({0}), std::invalid_argument);
+}
+
+TEST(BasicProperty, setNotValid_06_n) {
+ DimensionOfBanana d;
+ EXPECT_THROW(d.set({0, 1}), std::invalid_argument);
+}
+
TEST(BasicProperty, fromStringNotValid_01_n) {
NumBanana b;
EXPECT_THROW(nntrainer::from_string("not integer", b), std::invalid_argument);
std::invalid_argument);
}
+TEST(InputConnection, invalidFormat_n_07) {
+ using namespace nntrainer::props;
+ InputConnection actual;
+ EXPECT_THROW(nntrainer::from_string("name:layer0", actual),
+ std::invalid_argument);
+}
+
+TEST(InputConnection, invalidFormat_n_08) {
+ using namespace nntrainer::props;
+ InputConnection actual;
+ EXPECT_THROW(nntrainer::from_string("name(layer0)", actual),
+ std::invalid_argument);
+}
+
+TEST(InputConnection, invalidFormat_n_09) {
+ using namespace nntrainer::props;
+ InputConnection actual;
+ EXPECT_THROW(nntrainer::from_string("name==layer0", actual),
+ std::invalid_argument);
+}
+
TEST(DropOutRate, dropout_01_n) {
nntrainer::props::DropOutRate dropout;
EXPECT_THROW(dropout.set(-0.5), std::invalid_argument);
/**
* @brief Optimizer create
*/
-TEST(nntrainer_Optimizer, setType_02_n) {
+TEST(nntrainer_Optimizer, create_03_n) {
+ std::unique_ptr<nntrainer::Optimizer> op;
+ auto &ac = nntrainer::AppContext::Global();
+ EXPECT_ANY_THROW(op =
+ ac.createObject<nntrainer::Optimizer>("adam", {"lr=0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_04_n) {
+ std::unique_ptr<nntrainer::Optimizer> op;
+ auto &ac = nntrainer::AppContext::Global();
+ EXPECT_ANY_THROW(
+ op = ac.createObject<nntrainer::Optimizer>("adam", {"learning_rate:0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_05_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_NO_THROW(op = ac.createObject<nntrainer::Optimizer>("sgd", {}));
/**
* @brief Optimizer create
*/
-TEST(nntrainer_Optimizer, setType_03_n) {
+TEST(nntrainer_Optimizer, create_06_n) {
+ std::unique_ptr<nntrainer::Optimizer> op;
+ auto &ac = nntrainer::AppContext::Global();
+ EXPECT_ANY_THROW(op =
+ ac.createObject<nntrainer::Optimizer>("sgd", {"lr=0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_07_n) {
+ std::unique_ptr<nntrainer::Optimizer> op;
+ auto &ac = nntrainer::AppContext::Global();
+ EXPECT_ANY_THROW(
+ op = ac.createObject<nntrainer::Optimizer>("sgd", {"learning_rate:0.1"}));
+}
+
+/**
+ * @brief Optimizer create
+ */
+TEST(nntrainer_Optimizer, create_08_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(op =
/**
* @brief Optimizer create
*/
-TEST(nntrainer_Optimizer, setType_04_n) {
+TEST(nntrainer_Optimizer, create_09_n) {
std::unique_ptr<nntrainer::Optimizer> op;
auto &ac = nntrainer::AppContext::Global();
EXPECT_ANY_THROW(
std::invalid_argument);
}
+/**
+ * @brief test constructing lr scheduler with an unknown property key
+ *
+ */
+TEST(lr_constant, ctor_initializer_05_n) {
+ EXPECT_THROW(nntrainer::createLearningRateScheduler<
+ nntrainer::ConstantLearningRateScheduler>({"lr=0.1"}),
+ std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler with ':' used instead of '='
+ *
+ */
+TEST(lr_constant, ctor_initializer_06_n) {
+ EXPECT_THROW(
+ nntrainer::createLearningRateScheduler<
+ nntrainer::ConstantLearningRateScheduler>({"learning_rate:0.1"}),
+ std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler with a parenthesized value instead of '='
+ *
+ */
+TEST(lr_constant, ctor_initializer_07_n) {
+ EXPECT_THROW(
+ nntrainer::createLearningRateScheduler<
+ nntrainer::ConstantLearningRateScheduler>({"learning_rate(0.1)"}),
+ std::invalid_argument);
+}
+
+/**
+ * @brief test constructing lr scheduler with a bare value and no key
+ *
+ */
+TEST(lr_constant, ctor_initializer_08_n) {
+ EXPECT_THROW(nntrainer::createLearningRateScheduler<
+ nntrainer::ConstantLearningRateScheduler>({"0.1"}),
+ std::invalid_argument);
+}
+
/**
* @brief test set and get learning rate
*
EXPECT_FLOAT_EQ(lr->getLearningRate(10), 1.0f);
}
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_constant, prop_04_n) {
+ auto lr = createLRS("constant");
+ EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
+
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_constant, prop_05_n) {
+ auto lr = createLRS("constant");
+ EXPECT_THROW(lr->setProperty({"learning_rate(0.1)"}), std::invalid_argument);
+}
+
/**
* @brief test set and get learning rate
*
EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
}
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_exponential, prop_03_n) {
+ auto lr = createLRS("exponential");
+ EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
/**
* @brief test finalize
*
auto lr = createLRS("step");
EXPECT_THROW(lr->setProperty({"unknown=unknown"}), std::invalid_argument);
}
+/**
+ * @brief test set property with wrong format
+ *
+ */
+TEST(lr_step, prop_02_n) {
+ auto lr = createLRS("step");
+ EXPECT_THROW(lr->setProperty({"learning_rate:0.1"}), std::invalid_argument);
+}
/**
* @brief test finalize
EXPECT_EQ(target.multiply_i(target2), ML_ERROR_INVALID_PARAMETER);
}
+TEST(nntrainer_Tensor, multiply_i_broadcast_not_broadcastable_03_n) {
+ nntrainer::Tensor target(1, 2, 1, 2);
+ nntrainer::Tensor target2(1, 2, 3, 1);
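+ /** height 3 of target2 cannot broadcast onto height 1 of target */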
+
+ EXPECT_EQ(target.multiply_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
TEST(nntrainer_Tensor, multiply_01_p) {
int status = ML_ERROR_NONE;
int batch = 3;
EXPECT_EQ(target.divide_i(target2), ML_ERROR_INVALID_PARAMETER);
}
+TEST(nntrainer_Tensor, divide_i_broadcast_not_broadcastable_03_n) {
+ nntrainer::Tensor target(1, 2, 1, 2);
+ nntrainer::Tensor target2(1, 2, 3, 1);
+
+ EXPECT_EQ(target.divide_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
TEST(nntrainer_Tensor, add_i_01_p) {
int status = ML_ERROR_NONE;
int batch = 3;
EXPECT_EQ(target.add_i(target2), ML_ERROR_INVALID_PARAMETER);
}
+TEST(nntrainer_Tensor, add_i_broadcast_not_broadcastable_03_n) {
+ nntrainer::Tensor target(1, 2, 1, 2);
+ nntrainer::Tensor target2(1, 2, 3, 1);
+
+ EXPECT_EQ(target.add_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
TEST(nntrainer_Tensor, add_01_p) {
int status = ML_ERROR_NONE;
int batch = 3;
EXPECT_EQ(actual, expected);
}
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_supported_01_n) {
+ nntrainer::Tensor target(3, 1, 3, 1);
+ nntrainer::Tensor target2(3, 1, 3, 3);
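+ /** width 3 of target2 cannot broadcast onto width 1 of target */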
+
+ EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_broadcastable_02_n) {
+ nntrainer::Tensor target(3, 2, 4, 5);
+ nntrainer::Tensor target2(3, 2, 3, 1);
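+ /** heights 4 and 3 cannot be broadcast together */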
+
+ EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
+TEST(nntrainer_Tensor, subtract_i_broadcast_not_broadcastable_03_n) {
+ nntrainer::Tensor target(1, 2, 1, 2);
+ nntrainer::Tensor target2(1, 2, 3, 1);
+
+ EXPECT_EQ(target.subtract_i(target2), ML_ERROR_INVALID_PARAMETER);
+}
+
TEST(nntrainer_Tensor, subtract_i_01_p) {
int status = ML_ERROR_NONE;
int batch = 3;
EXPECT_EQ(target, original);
}
- /// same dimension, buffer size is different (not tested)
- {
- /// there is no way to make non contiguous tensor publicily yet
- EXPECT_TRUE(true);
- }
-
/// uninitialized with initialized flag is true
{
nntrainer::Tensor target;
EXPECT_EQ(target, original);
}
- /// same dimension, buffer size is different (not tested)
- {
- /// there is no way to make non contiguous tensor publicily yet
- EXPECT_TRUE(true);
- }
-
/// uninitialized with initialized flag is true
{
nntrainer::Tensor target;
}
}
-TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_n) {
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_01_n) {
nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
EXPECT_THROW(a.transpose("0:1:2", b), std::invalid_argument);
}
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_02_n) {
+ nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+ nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
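+ /** "0:1" specifies only two axes; three are required */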
+ EXPECT_THROW(a.transpose("0:1", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_dimension_not_match_nhwc_03_n) {
+ nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+ nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
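+ /** "1:2:3:4" has too many fields and out-of-range axes */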
+ EXPECT_THROW(a.transpose("1:2:3:4", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_01_n) {
+ nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+ nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
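+ /** axes must be ':'-separated; "<->" is not a valid direction string */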
+ EXPECT_THROW(a.transpose("1<->4", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_02_n) {
+ nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+ nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+ EXPECT_THROW(a.transpose("2,0,1,3", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_03_n) {
+ nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+ nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+ EXPECT_THROW(a.transpose("2-0-1-3", b), std::invalid_argument);
+}
+
+TEST(nntrainer_Tensor, tranpose_invalid_format_04_n) {
+ nntrainer::Tensor a(3, 5, 2, 4, NHWC_, FP32_);
+ nntrainer::Tensor b(3, 3, 1, 2, NHWC_, FP32_);
+
+ EXPECT_THROW(a.transpose("2/0/1/3", b), std::invalid_argument);
+}
+
// /**
// * @brief dequantize tensor with different format
// */