test: update test cases 14/233114/1
authorInki Dae <inki.dae@samsung.com>
Tue, 12 May 2020 07:28:33 +0000 (16:28 +0900)
committerInki Dae <inki.dae@samsung.com>
Tue, 12 May 2020 07:41:37 +0000 (16:41 +0900)
This patch adds the following test cases:
 - Two negative tests for GetBackendCapacity callback.
 - Two negative tests and one positive test for SetTargetDevices callback.
 - Two negative tests for Load callback.

Change-Id: I30c3703d73be91455a0ac2afc8970f81731a7e43
Signed-off-by: Inki Dae <inki.dae@samsung.com>
test/src/inference_engine_tc.cpp

index 9b2293c..5cfd884 100644 (file)
 #include "inference_engine_test_common.h"
 
// Parameter tuples consumed by the value-parameterized fixtures below,
// one per arity.
typedef std::tuple<std::string> ParamType_One;   // backend name only
typedef std::tuple<std::string, int> ParamType_Two;   // backend name, target device
typedef std::tuple<std::string, int, std::vector<std::string>> ParamType_Three;   // backend name, target device, model path(s)
typedef std::tuple<std::string, int, int, int, int, std::vector<std::string>, int, int, int, std::vector<std::string>, std::vector<std::string>, std::vector<std::string>, std::vector<int>> ParamType_Many;

// Empty fixtures: each test pulls its inputs via GetParam().
class InferenceEngineTestCase_G1 : public testing::TestWithParam<ParamType_One> { };
class InferenceEngineTestCase_G2 : public testing::TestWithParam<ParamType_Two> { };
class InferenceEngineTestCase_G3 : public testing::TestWithParam<ParamType_Three> { };
class InferenceEngineTestCase_G4 : public testing::TestWithParam<ParamType_Many> { };
 
@@ -74,13 +76,11 @@ TEST_P(InferenceEngineTestCase_G1, Bind_P)
        engine->UnbindBackend();
 }
 
-TEST_P(InferenceEngineTestCase_G3, Load_P)
+TEST_P(InferenceEngineTestCase_G1, Capacity_P)
 {
        std::string backend_name;
-       int target_devices;
-       std::vector<std::string> model_paths;
 
-       std::tie(backend_name, target_devices, model_paths) = GetParam();
+       std::tie(backend_name) = GetParam();
 
        std::cout <<"backend = " << backend_name << std::endl;
 
@@ -94,9 +94,118 @@ TEST_P(InferenceEngineTestCase_G3, Load_P)
        ret = engine->GetBackendCapacity(&capacity);
        EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
+       engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G1, Capacity_N1)
+{
+       std::string backend_name;
+
+       std::tie(backend_name) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       ret = engine->GetBackendCapacity(nullptr);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
+
+       engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G1, Capacity_N2)
+{
+       std::string backend_name;
+
+       std::tie(backend_name) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       inference_engine_capacity capacity;
+       int ret = engine->GetBackendCapacity(&capacity);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+TEST_P(InferenceEngineTestCase_G1, SetTarget_N1)
+{
+       std::string backend_name;
+
+       std::tie(backend_name) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       ret = engine->SetTargetDevices(0);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
+
+       engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G2, SetTarget_N2)
+{
+       std::string backend_name;
+       int target_devices;
+
+       std::tie(backend_name, target_devices) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       int ret = engine->SetTargetDevices(target_devices);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+TEST_P(InferenceEngineTestCase_G2, SetTarget_P)
+{
+       std::string backend_name;
+       int target_devices;
+
+       std::tie(backend_name, target_devices) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
        ret = engine->SetTargetDevices(target_devices);
        EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
 
+       engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G3, Load_P)
+{
+       std::string backend_name;
+       int target_devices;
+       std::vector<std::string> model_paths;
+
+       std::tie(backend_name, target_devices, model_paths) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
        std::vector <std::string> models;
        int model_type = GetModelInfo(model_paths, models);
        if (model_type == -1) {
@@ -110,6 +219,51 @@ TEST_P(InferenceEngineTestCase_G3, Load_P)
        engine->UnbindBackend();
 }
 
+TEST_P(InferenceEngineTestCase_G3, Load_N1)
+{
+       std::string backend_name;
+       int target_devices;
+       std::vector<std::string> model_paths;
+
+       std::tie(backend_name, target_devices, model_paths) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       std::vector<std::string> models;
+       int model_type = GetModelInfo(model_paths, models);
+       ASSERT_NE(model_type, -1);
+
+       int ret = engine->Load(models, (inference_model_format_e)model_type);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+TEST_P(InferenceEngineTestCase_G2, Load_N2)
+{
+       std::string backend_name;
+       int target_devices;
+       std::vector<std::string> model_paths = { "/path/to/wrong/ic_tflite_model.tflite" };
+
+       std::tie(backend_name, target_devices) = GetParam();
+
+       std::cout <<"backend = " << backend_name << std::endl;
+
+       auto engine = std::make_unique<InferenceEngineCommon>();
+       ASSERT_TRUE(engine);
+
+       int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices);
+       ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+       std::vector<std::string> models;
+       int model_type = GetModelInfo(model_paths, models);
+       ASSERT_NE(model_type, -1);
+
+       ret = engine->Load(models, (inference_model_format_e)model_type);
+       EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PATH);
+}
+
 TEST_P(InferenceEngineTestCase_G4, Inference_P)
 {
        std::string backend_name;
@@ -238,6 +392,17 @@ INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G1,
                )
 );
 
// Parameter sets for the two-argument (backend name, target device)
// test cases. NOTE(review): INSTANTIATE_TEST_CASE_P is deprecated in
// newer googletest in favor of INSTANTIATE_TEST_SUITE_P — confirm the
// project's gtest version before renaming.
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G2,
		testing::Values(
			// parameter order : backend name, target device
			// ARMNN.
			ParamType_Two("armnn", INFERENCE_TARGET_CPU),
			// TFLITE.
			ParamType_Two("tflite", INFERENCE_TARGET_CPU)
			/* TODO */
		)
);
+
 INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G3,
                testing::Values(
                        // parameter order : backend name, target device, model path/s