#include "inference_engine_test_common.h"
// Parameter tuples consumed by the parameterized test fixtures below.
// ParamType_One : backend name only.
typedef std::tuple<std::string> ParamType_One;
+// ParamType_Two : backend name, target device (see INSTANTIATE block below).
+typedef std::tuple<std::string, int> ParamType_Two;
// ParamType_Three : backend name, target device, model path(s).
typedef std::tuple<std::string, int, std::vector<std::string>> ParamType_Three;
// ParamType_Many : full inference scenario — presumably backend, devices,
// tensor geometry, layer names and expected answers; confirm against the
// Inference_P test body (not fully visible in this hunk).
typedef std::tuple<std::string, int, int, int, int, std::vector<std::string>, int, int, int, std::vector<std::string>, std::vector<std::string>, std::vector<std::string>, std::vector<int>> ParamType_Many;
// One gtest fixture per parameter arity; TEST_P cases pull their inputs
// from these via GetParam().
class InferenceEngineTestCase_G1 : public testing::TestWithParam<ParamType_One> { };
+class InferenceEngineTestCase_G2 : public testing::TestWithParam<ParamType_Two> { };
class InferenceEngineTestCase_G3 : public testing::TestWithParam<ParamType_Three> { };
class InferenceEngineTestCase_G4 : public testing::TestWithParam<ParamType_Many> { };
engine->UnbindBackend();
}
-TEST_P(InferenceEngineTestCase_G3, Load_P)
+TEST_P(InferenceEngineTestCase_G1, Capacity_P)
{
std::string backend_name;
- int target_devices;
- std::vector<std::string> model_paths;
- std::tie(backend_name, target_devices, model_paths) = GetParam();
+ std::tie(backend_name) = GetParam();
std::cout <<"backend = " << backend_name << std::endl;
ret = engine->GetBackendCapacity(&capacity);
EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ engine->UnbindBackend();
+}
+
+// Negative: after a backend is bound, GetBackendCapacity() must reject a
+// null output pointer with INVALID_PARAMETER.
+TEST_P(InferenceEngineTestCase_G1, Capacity_N1)
+{
+ std::string backend_name;
+
+ std::tie(backend_name) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ // Bind the backend first so the failure below is attributable to the
+ // null pointer, not to a missing backend.
+ int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+ // Null capacity pointer is the invalid input under test.
+ ret = engine->GetBackendCapacity(nullptr);
+ EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
+
+ engine->UnbindBackend();
+}
+
+// Negative: GetBackendCapacity() without any backend bound (no Init call)
+// must fail with INVALID_OPERATION.
+TEST_P(InferenceEngineTestCase_G1, Capacity_N2)
+{
+ std::string backend_name;
+
+ std::tie(backend_name) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ // Deliberately no InferenceEngineInit_* call here — that is the error
+ // condition being exercised.
+ inference_engine_capacity capacity;
+ int ret = engine->GetBackendCapacity(&capacity);
+ EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+// Negative: SetTargetDevices(0) — no device bit set — must be rejected
+// with INVALID_PARAMETER even though a backend is bound.
+TEST_P(InferenceEngineTestCase_G1, SetTarget_N1)
+{
+ std::string backend_name;
+
+ std::tie(backend_name) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+ // 0 is the invalid device mask under test.
+ ret = engine->SetTargetDevices(0);
+ EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
+
+ engine->UnbindBackend();
+}
+
+// Negative: SetTargetDevices() with a valid device but no backend bound
+// must fail with INVALID_OPERATION.
+TEST_P(InferenceEngineTestCase_G2, SetTarget_N2)
+{
+ std::string backend_name;
+ int target_devices;
+
+ std::tie(backend_name, target_devices) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ // No InferenceEngineInit_* call — absence of a bound backend is the
+ // error condition under test.
+ int ret = engine->SetTargetDevices(target_devices);
+ EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+// Positive: with a backend bound, SetTargetDevices() accepts the
+// parameterized device mask.
+TEST_P(InferenceEngineTestCase_G2, SetTarget_P)
+{
+ std::string backend_name;
+ int target_devices;
+
+ std::tie(backend_name, target_devices) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ int ret = InferenceEngineInit_One_Param(engine.get(), backend_name);
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
ret = engine->SetTargetDevices(target_devices);
EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+ engine->UnbindBackend();
+}
+
+TEST_P(InferenceEngineTestCase_G3, Load_P)
+{
+ std::string backend_name;
+ int target_devices;
+ std::vector<std::string> model_paths;
+
+ std::tie(backend_name, target_devices, model_paths) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices);
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
std::vector <std::string> models;
int model_type = GetModelInfo(model_paths, models);
if (model_type == -1) {
engine->UnbindBackend();
}
+// Negative: Load() with a resolvable model but no backend bound must fail
+// with INVALID_OPERATION.
+TEST_P(InferenceEngineTestCase_G3, Load_N1)
+{
+ std::string backend_name;
+ // target_devices is unpacked to satisfy the ParamType_Three tuple but is
+ // intentionally unused — the engine is never initialized in this test.
+ int target_devices;
+ std::vector<std::string> model_paths;
+
+ std::tie(backend_name, target_devices, model_paths) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ // Model resolution must succeed so the Load() failure below is caused
+ // only by the missing backend (GetModelInfo returns -1 on failure).
+ std::vector<std::string> models;
+ int model_type = GetModelInfo(model_paths, models);
+ ASSERT_NE(model_type, -1);
+
+ int ret = engine->Load(models, (inference_model_format_e)model_type);
+ EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_OPERATION);
+}
+
+// Negative: Load() with a deliberately nonexistent model path must fail
+// with INVALID_PATH once a backend is bound.
+TEST_P(InferenceEngineTestCase_G2, Load_N2)
+{
+ std::string backend_name;
+ int target_devices;
+ // Hard-coded bogus path — the invalid input under test.
+ std::vector<std::string> model_paths = { "/path/to/wrong/ic_tflite_model.tflite" };
+
+ std::tie(backend_name, target_devices) = GetParam();
+
+ std::cout <<"backend = " << backend_name << std::endl;
+
+ auto engine = std::make_unique<InferenceEngineCommon>();
+ ASSERT_TRUE(engine);
+
+ int ret = InferenceEngineInit_Two_Params(engine.get(), backend_name, target_devices);
+ ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
+
+ // GetModelInfo only classifies the format from the path, so it is
+ // expected to succeed here even though the file does not exist.
+ std::vector<std::string> models;
+ int model_type = GetModelInfo(model_paths, models);
+ ASSERT_NE(model_type, -1);
+
+ ret = engine->Load(models, (inference_model_format_e)model_type);
+ EXPECT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PATH);
+}
+
TEST_P(InferenceEngineTestCase_G4, Inference_P)
{
std::string backend_name;
)
);
+// Backend/device pairs fed to the two-parameter (G2) test cases above.
+// NOTE(review): INSTANTIATE_TEST_CASE_P is deprecated in newer gtest in
+// favor of INSTANTIATE_TEST_SUITE_P — kept here for consistency with the
+// rest of this file; migrate file-wide when gtest is upgraded.
+INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G2,
+ testing::Values(
+ // parameter order : backend name, target device
+ // ARMNN.
+ ParamType_Two("armnn", INFERENCE_TARGET_CPU),
+ // TFLITE.
+ ParamType_Two("tflite", INFERENCE_TARGET_CPU)
+ /* TODO */
+ )
+);
+
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G3,
testing::Values(
// parameter order : backend name, target device, model path/s