From: Inki Dae
Date: Wed, 29 Dec 2021 09:23:42 +0000 (+0900)
Subject: src: use ml_single_open_full api
X-Git-Tag: submit/tizen/20220105.080154~2
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=db16f45efe816a4e7521085836059b308e1f4c12;p=platform%2Fcore%2Fmultimedia%2Finference-engine-mlapi.git

src: use ml_single_open_full api

[Version] : 1.3.2-0
[Issue type] : bug fix

Replace the ml_single_open call with the ml_single_open_full API so that
various target devices can be used with the SNPE engine. With the SNPE
tensor filter of NNStreamer, the target device is decided by a user-given
custom property, so use ml_single_open_full, which accepts such a custom
property.

Change-Id: I2a6f1ab2b619c59164e4043fcfb03dd0cea97ad6
Signed-off-by: Inki Dae
---

diff --git a/packaging/inference-engine-mlapi.spec b/packaging/inference-engine-mlapi.spec
index 230705d..97e668c 100644
--- a/packaging/inference-engine-mlapi.spec
+++ b/packaging/inference-engine-mlapi.spec
@@ -1,6 +1,6 @@
 Name:       inference-engine-mlapi
 Summary:    ML Single API backend of NNStreamer for MediaVision
-Version:    0.3.1
+Version:    0.3.2
 Release:    0
 Group:      Multimedia/Libraries
 License:    Apache-2.0
diff --git a/src/inference_engine_mlapi.cpp b/src/inference_engine_mlapi.cpp
index b9aeaf4..e119e57 100644
--- a/src/inference_engine_mlapi.cpp
+++ b/src/inference_engine_mlapi.cpp
@@ -277,6 +277,15 @@ namespace MLAPIImpl
         }
     }
 
+    const char *InferenceMLAPI::GetCustomProp()
+    {
+        if (mPluginType != INFERENCE_BACKEND_SNPE)
+            return "";
+
+        return mTargetDevice == INFERENCE_TARGET_CPU ? "RUNTIME:CPU" :
+               mTargetDevice == INFERENCE_TARGET_GPU ? "RUNTIME:GPU" : "RUNTIME:DSP";
+    }
+
     int InferenceMLAPI::Load(std::vector<std::string> model_paths,
                              inference_model_format_e model_format)
     {
@@ -314,8 +323,8 @@ namespace MLAPIImpl
             return ret;
         }
 
-        int err = ml_single_open(&mSingle, model_str.c_str(), in_info, out_info,
-                                 nnfw_type, nnfw_hw);
+        int err = ml_single_open_full(&mSingle, model_str.c_str(), in_info, out_info,
+                                      nnfw_type, nnfw_hw, GetCustomProp());
         if (err != ML_ERROR_NONE) {
             LOGE("Failed to request ml_single_open(%d).", err);
             return INFERENCE_ENGINE_ERROR_INVALID_OPERATION;
diff --git a/src/inference_engine_mlapi_private.h b/src/inference_engine_mlapi_private.h
index 98ab53a..52dfaff 100644
--- a/src/inference_engine_mlapi_private.h
+++ b/src/inference_engine_mlapi_private.h
@@ -88,6 +88,7 @@ namespace MLAPIImpl
         bool IsFileReadable(const std::string& path);
         std::tuple<ml_nnfw_type_e, ml_nnfw_hw_e> GetNNFWInfo();
         std::string GetModelPath(const std::vector<std::string>& model_paths);
+        const char *GetCustomProp();
 
         int mPluginType;
         int mTargetDevice;
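
Note: the standalone sketch below shows the call shape this patch switches
to, for readers unfamiliar with ml_single_open_full. It is not part of the
patch. The model path is hypothetical, passing NULL for the tensor info
handles assumes the model file carries its own input/output descriptions,
and "RUNTIME:GPU" mirrors one of the strings built by GetCustomProp() above.

/* sketch.c - open an SNPE model with a custom runtime property */
#include <stdio.h>
#include <nnstreamer.h>
#include <nnstreamer-single.h>

int main(void)
{
        ml_single_h single = NULL;

        /* Same shape as the patched Load() above: the last argument is
         * the custom property string that selects the SNPE runtime. */
        int err = ml_single_open_full(&single, "/usr/share/model/sample.dlc",
                                      NULL, NULL, /* I/O info taken from the model */
                                      ML_NNFW_TYPE_SNPE, ML_NNFW_HW_ANY,
                                      "RUNTIME:GPU");
        if (err != ML_ERROR_NONE) {
                fprintf(stderr, "ml_single_open_full failed (%d)\n", err);
                return 1;
        }

        /* ... run inference with ml_single_invoke() ... */

        ml_single_close(single);
        return 0;
}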