From: Inki Dae Date: Wed, 13 May 2020 08:07:59 +0000 (+0900) Subject: Add a new BindBackend function with backend type X-Git-Tag: submit/tizen/20200602.011936~5 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=refs%2Fchanges%2F53%2F233253%2F4;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git Add a new BindBackend function with backend type This patch adds a new BindBackend function which requires backend type as a parameter. Change-Id: I842be4033d8cfd2cc3f2e455bd02123c95f126b4 Signed-off-by: Inki Dae --- diff --git a/include/inference_engine_common_impl.h b/include/inference_engine_common_impl.h index c659c05..f823f9f 100755 --- a/include/inference_engine_common_impl.h +++ b/include/inference_engine_common_impl.h @@ -36,7 +36,6 @@ public: ~InferenceEngineCommon(); - /** * @brief Load a backend engine library with a given backend name. * @details This callback loads a backend engine library with a given backend name. @@ -50,6 +49,19 @@ public: */ int BindBackend(inference_engine_config *config); + /** + * @brief Load a backend engine library with a given backend type. + * @details This callback loads a backend engine library with a given backend type. + * In order to find a backend engine library corresponding to the given backend type, + * this function makes a full name of the library file with the given backend type. + * After that, it opens the library file by calling dlopen function to find an entry point + * function - EngineInit - of an actual backend library. + * + * @since_tizen 6.0 + * @param[in] backend_type An enumeration value which indicates one of backend types - refer to inference_backend_type_e. + */ + int BindBackend(int backend_type); + /** + * @brief Unload a backend engine library. + * @details This callback unload a backend engine library. 
@@ -217,7 +229,7 @@ public: int DumpProfileToFile(const std::string filename = "dump.txt"); private: - int InitBackendEngine(std::string &backend_path); + int InitBackendEngine(const std::string &backend_path); int CheckTensorBuffers(std::vector &buffers); int CheckLayerProperty(inference_engine_layer_property &property); diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp index 8ef900d..f8b902d 100755 --- a/src/inference_engine_common_impl.cpp +++ b/src/inference_engine_common_impl.cpp @@ -167,7 +167,7 @@ int InferenceEngineCommon::DumpProfileToFile(const std::string filename) return INFERENCE_ENGINE_ERROR_NONE; } -int InferenceEngineCommon::InitBackendEngine(std::string &backend_path) +int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path) { LOGI("lib: %s", backend_path.c_str()); mBackendModule = dlopen(backend_path.c_str(), RTLD_NOW); @@ -234,6 +234,47 @@ int InferenceEngineCommon::BindBackend(inference_engine_config *config) return INFERENCE_ENGINE_ERROR_NONE; } +int InferenceEngineCommon::BindBackend(int backend_type) +{ + LOGI("ENTER"); + + if (mBackendHandle) { + LOGE("Already backend engine has been initialized."); + return INFERENCE_ENGINE_ERROR_INVALID_OPERATION; + } + + if (backend_type <= INFERENCE_BACKEND_NONE || backend_type >= INFERENCE_BACKEND_MAX) { + LOGE("Backend type is invalid."); + return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER; + } + + if (mUseProfiler == true) { + // Memory usage will be measured between BindBackend ~ UnbindBackend callbacks. 
+ mProfiler->Start(IE_PROFILER_MEMORY); + } + + std::string backendNameTable[INFERENCE_BACKEND_MAX] = { + [INFERENCE_BACKEND_OPENCV] = "opencv", + [INFERENCE_BACKEND_TFLITE] = "tflite", + [INFERENCE_BACKEND_ARMNN] = "armnn" + }; + + std::string backendLibName = "libinference-engine-" + backendNameTable[backend_type] + ".so"; + + int ret = InitBackendEngine(backendLibName); + if (ret != INFERENCE_ENGINE_ERROR_NONE) { + return ret; + } + + if (mUseProfiler == true) { + mProfiler->AddBackendName(backendNameTable[backend_type]); + } + + LOGI("LEAVE"); + + return INFERENCE_ENGINE_ERROR_NONE; +} + void InferenceEngineCommon::UnbindBackend(void) { LOGW("ENTER");