From: Inki Dae
Date: Tue, 12 May 2020 07:28:33 +0000 (+0900)
Subject: test: update test cases
X-Git-Tag: submit/tizen/20200602.011936~13
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fchanges%2F14%2F233114%2F1;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git

test: update test cases

This patch adds the following test cases:
- Two negative tests for the GetBackendCapacity callback.
- Two negative tests and one positive test for the SetTargetDevices callback.
- Two negative tests for the Load callback.

Change-Id: I30c3703d73be91455a0ac2afc8970f81731a7e43
Signed-off-by: Inki Dae
---

diff --git a/test/src/inference_engine_tc.cpp b/test/src/inference_engine_tc.cpp
index 9b2293c..5cfd884 100644
--- a/test/src/inference_engine_tc.cpp
+++ b/test/src/inference_engine_tc.cpp
@@ -29,10 +29,12 @@
 #include "inference_engine_test_common.h"
 
 typedef std::tuple<std::string> ParamType_One;
+typedef std::tuple<std::string, int> ParamType_Two;
 typedef std::tuple<std::string, int, std::vector<std::string>> ParamType_Three;
 typedef std::tuple<std::string, int, std::vector<std::string>, int, int, int, std::vector<std::string>, std::vector<std::string>, std::vector<std::string>, std::vector<int>> ParamType_Many;
 
 class InferenceEngineTestCase_G1 : public testing::TestWithParam<ParamType_One> { };
+class InferenceEngineTestCase_G2 : public testing::TestWithParam<ParamType_Two> { };
 class InferenceEngineTestCase_G3 : public testing::TestWithParam<ParamType_Three> { };
 class InferenceEngineTestCase_G4 : public testing::TestWithParam<ParamType_Many> { };
 
@@ -74,13 +76,11 @@ TEST_P(InferenceEngineTestCase_G1, Bind_P)
 	engine->UnbindBackend();
 }
 
-TEST_P(InferenceEngineTestCase_G3, Load_P)
+TEST_P(InferenceEngineTestCase_G1, Capacity_P)
 {
 	std::string backend_name;
-	int target_devices;
-	std::vector<std::string> model_paths;
 
-	std::tie(backend_name, target_devices, model_paths) = GetParam();
+	std::tie(backend_name) = GetParam();
 
 	std::cout <<"backend = " << backend_name << std::endl;
 
@@ -94,9 +94,118 @@ TEST_P(InferenceEngineTestCase_G3, Load_P)
 	ret = engine->GetBackendCapacity(&capacity);
 	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
+	engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G1, Capacity_N1)
+{
+	std::string backend_name;
+
+	std::tie(backend_name) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	ret = engine->GetBackendCapacity(nullptr);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
+
+	engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G1, Capacity_N2)
+{
+	std::string backend_name;
+
+	std::tie(backend_name) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	inference_engine_capacity capacity;
+	int ret = engine->GetBackendCapacity(&capacity);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+TEST_P(InferenceEngineTestCase_G1, SetTarget_N1)
+{
+	std::string backend_name;
+
+	std::tie(backend_name) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	ret = engine->SetTargetDevices(0);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
+
+	engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G2, SetTarget_N2)
+{
+	std::string backend_name;
+	int target_devices;
+
+	std::tie(backend_name, target_devices) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = engine->SetTargetDevices(target_devices);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+TEST_P(InferenceEngineTestCase_G2, SetTarget_P)
+{
+	std::string backend_name;
+	int target_devices;
+
+	std::tie(backend_name, target_devices) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
 	ret = engine->SetTargetDevices(target_devices);
 	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
+	engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G3, Load_P)
+{
+	std::string backend_name;
+	int target_devices;
+	std::vector<std::string> model_paths;
+
+	std::tie(backend_name, target_devices, model_paths) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
 	std::vector<std::string> models;
 	int model_type = GetModelInfo(model_paths, models);
 	if (model_type == -1) {
@@ -110,6 +219,51 @@ TEST_P(InferenceEngineTestCase_G3, Load_P)
 	engine->UnbindBackend();
 }
 
+TEST_P(InferenceEngineTestCase_G3, Load_N1)
+{
+	std::string backend_name;
+	int target_devices;
+	std::vector<std::string> model_paths;
+
+	std::tie(backend_name, target_devices, model_paths) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	std::vector<std::string> models;
+	int model_type = GetModelInfo(model_paths, models);
+	ASSERT_NE(model_type, -1);
+
+	int ret = engine->Load(models, (inference_model_format_e)model_type);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+TEST_P(InferenceEngineTestCase_G2, Load_N2)
+{
+	std::string backend_name;
+	int target_devices;
+	std::vector<std::string> model_paths = { "/path/to/wrong/ic_tflite_model.tflite" };
+
+	std::tie(backend_name, target_devices) = GetParam();
+
+	std::cout <<"backend = " << backend_name << std::endl;
+
+	auto engine = std::make_unique<InferenceEngineCommon>();
+	ASSERT_TRUE(engine);
+
+	int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices);
+	ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+	std::vector<std::string> models;
+	int model_type = GetModelInfo(model_paths, models);
+	ASSERT_NE(model_type, -1);
+
+	ret = engine->Load(models, (inference_model_format_e)model_type);
+	EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PATH);
+}
+
 TEST_P(InferenceEngineTestCase_G4, Inference_P)
 {
 	std::string backend_name;
@@ -238,6 +392,17 @@ INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G1,
 	)
 );
 
+INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G2,
+	testing::Values(
+		// parameter order : backend name, target device
+		// ARMNN.
+		ParamType_Two("armnn", INFERENCE_TARGET_CPU),
+		// TFLITE.
+		ParamType_Two("tflite", INFERENCE_TARGET_CPU)
+		/* TODO */
+	)
+);
+
 INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G3,
 	testing::Values(
 		// parameter order : backend name, target device, model path/s