From: Inki Dae Date: Wed, 3 Jun 2020 08:42:34 +0000 (+0900) Subject: mv_inference: Change a function name from SetPluginType to SetPrivateData X-Git-Tag: accepted/tizen/unified/20200628.221641~4 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=6a71d87b3fdfcb1340a16520266183a1d180c64f;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git mv_inference: Change a function name from SetPluginType to SetPrivateData This patch changes a function name from SetPluginType to SetPrivateData to utilize it for more purposes which may be needed to pass some backend private data to a given backend before loading a model file. And also this patch makes IInferenceEngineCommon become an interface class by declaring the SetPrivateData function as a pure virtual function. Change-Id: Icc0db970ca2b9d44e3954c50e837ebccc996b83d Signed-off-by: Inki Dae --- diff --git a/include/inference_engine_common.h b/include/inference_engine_common.h index be6b037..1897c38 100755 --- a/include/inference_engine_common.h +++ b/include/inference_engine_common.h @@ -31,14 +31,14 @@ public: virtual ~IInferenceEngineCommon() {}; /** - * @brief Set a tensor filter plugin type. - * @details See #inference_backend_type_e - * This callback passes a tensor filter plugin type - NNFW or VIVANTE to a tensor filter plugin for NNStreamer. + * @brief Set backend private data if needed. + * @details This callback passes a backend private data to a given backend. + * I.e., ML Single API backend needs which tensor filter type of NNStreamer should be used such as NNFW or VIVANTE. + * - * @since_tizen 6.0 (Optional) - * @param[in] type This could be one among plugin types enumerated on inference_backend_type_e. + * @since_tizen 6.0 + * @param[in] data This could be backend specific data object. */ - virtual int SetPluginType(const int type = 0) { return type; } + virtual int SetPrivateData(void *data) = 0; /** * @brief Set target devices. 
diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp index a3e0d52..6666cce 100755 --- a/src/inference_engine_common_impl.cpp +++ b/src/inference_engine_common_impl.cpp @@ -198,7 +198,7 @@ int InferenceEngineCommon::InitBackendEngine(const std::string &backend_path, in // If a backend is ML Single API of NNStreamer or NNFW then set a tensor filter plugin type. if (backend_type == INFERENCE_BACKEND_NNFW || backend_type == INFERENCE_BACKEND_MLAPI) { - int ret = mBackendHandle->SetPluginType(backend_type); + int ret = mBackendHandle->SetPrivateData(&backend_type); if (ret != INFERENCE_ENGINE_ERROR_NONE) { LOGE("Failed to set a tensor filter plugin."); return INFERENCE_ENGINE_ERROR_NOT_SUPPORTED;