~InferenceEngineCommon();
-
/**
* @brief Load a backend engine library with a given backend name.
* @details This callback loads a backend engine library with a given backend name.
int BindBackend(inference_engine_config *config);
/**
+ * @brief Load a backend engine library with a given backend type.
+ * @details This callback loads a backend engine library with a given backend type.
+ * In order to find a backend engine library corresponding to the given backend type,
+ * this function makes a full name of the library file with given backend type.
+ * After that, it opens the library file by calling the dlopen function to find an entry point
+ * function - EngineInit - of an actual backend library.
+ *
+ * @since_tizen 6.0
+ * @param[in] backend_type An enumeration value which indicates one of the backend types - refer to inference_backend_type_e.
+ */
+ int BindBackend(int backend_type);
+
+ /**
* @brief Unload a backend engine library.
* @details This callback unload a backend engine library.
*
int DumpProfileToFile(const std::string filename = "dump.txt");
private:
- int InitBackendEngine(std::string &backend_path);
+ int InitBackendEngine(const std::string &backend_path);
int CheckTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers);
int CheckLayerProperty(inference_engine_layer_property &property);
return INFERENCE_ENGINE_ERROR_NONE;
}
-int InferenceEngineCommon::InitBackendEngine(std::string &backend_path)
+// Open the backend library at backend_path with dlopen(RTLD_NOW) and keep the
+// handle in mBackendModule. The path is taken by const reference: it is only
+// read (c_str()), never mutated, so the const-correct signature is safe.
+int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path)
{
LOGI("lib: %s", backend_path.c_str());
mBackendModule = dlopen(backend_path.c_str(), RTLD_NOW);
// NOTE(review): no NULL check on mBackendModule is visible in this hunk -
// confirm the elided context handles dlopen failure before returning NONE.
return INFERENCE_ENGINE_ERROR_NONE;
}
+// @brief Load a backend engine library chosen by backend_type.
+//
+// Maps the backend type to its library file name
+// (libinference-engine-<name>.so) and loads it through InitBackendEngine.
+//
+// @param[in] backend_type One of inference_backend_type_e.
+// @return INFERENCE_ENGINE_ERROR_NONE on success,
+//         INFERENCE_ENGINE_ERROR_INVALID_OPERATION if a backend is already
+//         bound, INFERENCE_ENGINE_ERROR_INVALID_PARAMETER for an unknown type.
+int InferenceEngineCommon::BindBackend(int backend_type)
+{
+	LOGI("ENTER");
+
+	if (mBackendHandle) {
+		LOGE("Already backend engine has been initialized.");
+		return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
+	}
+
+	// Resolve the backend name with a switch instead of a designated array
+	// initializer ([INFERENCE_BACKEND_OPENCV] = "opencv", ...): array
+	// designators are a GNU extension, not ISO C++, and they silently leave
+	// unmapped enum values as empty strings. The default case also replaces
+	// the separate NONE/MAX range check with the same error code.
+	std::string backend_name;
+
+	switch (backend_type) {
+	case INFERENCE_BACKEND_OPENCV:
+		backend_name = "opencv";
+		break;
+	case INFERENCE_BACKEND_TFLITE:
+		backend_name = "tflite";
+		break;
+	case INFERENCE_BACKEND_ARMNN:
+		backend_name = "armnn";
+		break;
+	default:
+		LOGE("Backend type is invalid.");
+		return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
+	}
+
+	if (mUseProfiler == true) {
+		// Memory usage will be measured between BindBackend ~ UnbindBackend callbacks.
+		mProfiler->Start(IE_PROFILER_MEMORY);
+	}
+
+	std::string backendLibName = "libinference-engine-" + backend_name + ".so";
+
+	int ret = InitBackendEngine(backendLibName);
+	if (ret != INFERENCE_ENGINE_ERROR_NONE) {
+		// NOTE(review): the memory profiler stays started on this failure
+		// path - confirm whether it should be stopped/rolled back here.
+		return ret;
+	}
+
+	if (mUseProfiler == true) {
+		mProfiler->AddBackendName(backend_name);
+	}
+
+	LOGI("LEAVE");
+
+	return INFERENCE_ENGINE_ERROR_NONE;
+}
+
void InferenceEngineCommon::UnbindBackend(void)
{
LOGW("ENTER");