*/
int BindBackend(inference_engine_config *config);
/**
 * @brief Load a backend engine library with a given backend type.
 * @details This callback loads a backend engine library with a given backend type.
 * In order to find a backend engine library corresponding to the given backend type,
 * this function makes a full name of the library file with the given backend type.
 * After that, it opens the library file by calling the dlopen function to find an entry point
 * function - EngineInit - of an actual backend library.
 *
 * @since_tizen 6.0
 * @param[in] backend_type An enumeration value which indicates one of the backend types - refer to inference_backend_type_e.
 * @param[in] device_type An enumeration value which indicates the target device type - refer to inference_target_type_e.
 */
- int BindBackend(int backend_type, int device_type);
-
/**
* @brief Unload a backend engine library.
* @details This callback unload a backend engine library.
return INFERENCE_ENGINE_ERROR_NONE;
}
- int InferenceEngineCommon::BindBackend(int backend_type, int device_type)
- {
- LOGI("ENTER");
-
- if (mBackendHandle) {
- LOGE("Already backend engine has been initialized.");
- return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
- }
-
- if (backend_type <= INFERENCE_BACKEND_NONE ||
- backend_type >= INFERENCE_BACKEND_MAX) {
- LOGE("Backend type is invalid.");
- return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
- }
-
- if (mUseProfiler == true) {
- // Memory usage will be measured between BindBackend ~ UnbindBackend callbacks.
- mProfiler.Start(IE_PROFILER_MEMORY);
- }
-
- std::string backendNameTable[INFERENCE_BACKEND_MAX] = {
- [INFERENCE_BACKEND_OPENCV] = "opencv",
- [INFERENCE_BACKEND_TFLITE] = "tflite",
- [INFERENCE_BACKEND_ARMNN] = "armnn",
- [INFERENCE_BACKEND_MLAPI] = "mlapi",
- [INFERENCE_BACKEND_ONE] = "mlapi"
- };
-
- int api_fw_type;
-
- // For two backend types - MLAPI and ONE, MLAPI will be used as API framework in default.
- // And for all NPU devices passed with INFERENCE_TARGET_CUSTOM type, MLAPI will be used as API framework in default.
- if (UseMLAPI(backend_type, device_type))
- api_fw_type = INFERENCE_BACKEND_MLAPI;
- else
- api_fw_type = backend_type;
-
- std::string backendLibName =
- "libinference-engine-" + backendNameTable[api_fw_type] + ".so";
-
- int ret = InitBackendEngine(backendLibName, backend_type, device_type);
- if (ret != INFERENCE_ENGINE_ERROR_NONE) {
- return ret;
- }
-
- if (mUseProfiler == true) {
- mProfiler.AddBackendName(backendNameTable[backend_type]);
- }
-
- LOGI("LEAVE");
-
- return INFERENCE_ENGINE_ERROR_NONE;
- }
-
void InferenceEngineCommon::UnbindBackend(void)
{
LOGW("ENTER");
std::vector<std::string>, std::vector<std::string>,
std::vector<std::string>, std::vector<int> >
ParamType_Many;
// A single int parameter: the backend type handed to BindBackend().
// `using` alias instead of `typedef`, matching modern C++ idiom.
using ParamType_One_Int = std::tuple<int>;
// Fixture for test cases driven by a single ParamType_One value
// (per the instantiation below, the backend name).
class InferenceEngineTestCase_G1 : public testing::TestWithParam<ParamType_One>
{};
{};
// Fixture for test cases taking the full ParamType_Many tuple
// (the string-vector / int-vector parameter pack declared above).
class InferenceEngineTestCase_G6 : public testing::TestWithParam<ParamType_Many>
{};
// Fixture for the positive BindBackend(backend_type, device_type) cases
// (exercised by Bind_P below).
class InferenceEngineTestCase_G7
		: public testing::TestWithParam<ParamType_One_Int>
{};
// Fixture for the negative BindBackend cases with an invalid backend type
// (exercised by Bind_N below).
class InferenceEngineTestCase_G8
		: public testing::TestWithParam<ParamType_One_Int>
{};
static auto InferenceEngineInit_One_Param =
[](InferenceEngineCommon *engine, std::string &backend_name) -> int {
models.clear();
}
-TEST_P(InferenceEngineTestCase_G7, Bind_P)
-{
- int backend_type;
-
- std::tie(backend_type) = GetParam();
-
- auto engine = std::make_unique<InferenceEngineCommon>();
- ASSERT_TRUE(engine);
-
- int ret = engine->LoadConfigFile();
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-
- ret = engine->BindBackend(backend_type, INFERENCE_TARGET_CPU);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-
- engine->UnbindBackend();
-}
-
-TEST_P(InferenceEngineTestCase_G8, Bind_N)
-{
- int backend_type;
-
- std::tie(backend_type) = GetParam();
-
- auto engine = std::make_unique<InferenceEngineCommon>();
- ASSERT_TRUE(engine);
-
- int ret = engine->LoadConfigFile();
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_NONE);
-
- ret = engine->BindBackend(backend_type, 0);
- ASSERT_EQ(ret, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER);
-}
-
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G1,
testing::Values(
// parameter order : backend name
{ 955 })
/* TODO */
));
-
// Valid backend types for the positive Bind_P cases.
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G7,
						testing::Values(
								// parameter order : backend type
								// ARMNN.
								ParamType_One_Int(INFERENCE_BACKEND_ARMNN),
								// TFLITE.
								ParamType_One_Int(INFERENCE_BACKEND_TFLITE),
								// OPENCV.
								ParamType_One_Int(INFERENCE_BACKEND_OPENCV),
								// ML Single API for NNStreamer with On-device Neural Engine tensor filter.
								ParamType_One_Int(INFERENCE_BACKEND_ONE),
								// ML Single API for NNStreamer with Vivante NPU.
								ParamType_One_Int(INFERENCE_BACKEND_MLAPI)
								/* TODO */
								));
-
// Invalid backend types for the negative Bind_N cases.
INSTANTIATE_TEST_CASE_P(Prefix, InferenceEngineTestCase_G8,
						testing::Values(
								// parameter order : backend type
								// Wrong backend type: -1 is below the valid range,
								// so BindBackend must reject it.
								ParamType_One_Int(-1)
								/* TODO */
								));