*/
int DumpProfileToFile(const std::string filename = "dump.txt");
- /**
-  * @brief Set the backend engine library (path and/or type) to be loaded.
-  *
-  * @since_tizen 5.5
-  */
- int SetLibraryPath(std::string path);
- int SetBackendEngine(inference_backend_type_e backend);
-
private:
int InitBackendEngine(std::string &backend_path);
int CheckTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers);
return ret;
}
-int InferenceEngineCommon::SetLibraryPath(std::string path)
-{
- LOGE("ENTER");
- if (path.empty())
- return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
-
- if (fs::is_directory(path)) {
- if(path.back() != '/')
- path += "/";
-
- mBackendLibName = path + mBackendLibName;
- }
- else {
- if (fs::is_regular_file(path)){
- mBackendLibName = path;
- }
- else {
- LOGE("Fail to find path. [%s]", path.c_str());
- return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
- }
- }
- LOGE("lib: %s", mBackendLibName.c_str());
- LOGE("LEAVE");
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::SetBackendEngine(inference_backend_type_e backend)
-{
- std::string backendString;
- switch(backend){
- case INFERENCE_BACKEND_OPENCV:
- backendString = "opencv";
- break;
- case INFERENCE_BACKEND_TFLITE:
- backendString = "tflite";
- break;
- case INFERENCE_BACKEND_ARMNN:
- backendString = "armnn";
- break;
- default:
- LOGE("Not supported backend engine [%d]", backend);
- return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;
- }
-
- mBackendLibName = "libinference-engine-" + backendString + ".so";
-
- return INFERENCE_ENGINE_ERROR_NONE;
-}
} /* Common */
} /* InferenceEngineInterface */