From: Inki Dae Date: Wed, 11 Dec 2024 06:59:52 +0000 (+0900) Subject: Add inference engine private data support X-Git-Tag: accepted/tizen/9.0/unified/20250102.114430^0 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=6515212f7fb208ddd6169bcf5da91f1eec7890f8;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git Add inference engine private data support [Version] : 0.5.2 [Issue type] : new feature Add inference engine private data support. To support this, this patch adds a new enumeration and structure to the inference_engine_type.h header file to manage private data in an inference engine. With this patch, we can deliver inference engine specific data to each inference engine backend. Change-Id: I2cbb015aa4262315982adcc5c6119ada7cad543f Signed-off-by: Inki Dae --- diff --git a/include/inference_engine_common_impl.h b/include/inference_engine_common_impl.h index 97f2888..66c4714 100644 --- a/include/inference_engine_common_impl.h +++ b/include/inference_engine_common_impl.h @@ -39,6 +39,15 @@ namespace Common ~InferenceEngineCommon(); + /** + * @brief Set inference engine specific data. + * This function is called by the upper framework to pass private data to the inference engine, + * such as the number of threads for the tensorflow lite engine backend. + * + * @param[in] data A pointer to an inference_engine_private_data object. + */ + int SetPrivateData(void *data); + /** + * @brief Load configuration information from a given ini file. + * If not given then /etc/inference/inference_engine_mlapi_backend.ini file will be used in default. 
diff --git a/include/inference_engine_type.h b/include/inference_engine_type.h index 6c5d1e5..19cfc7a 100644 --- a/include/inference_engine_type.h +++ b/include/inference_engine_type.h @@ -139,6 +139,11 @@ extern "C" INFERENCE_ENGINE_CLTUNER_MAX } inference_engine_cltuner_mode_e; + typedef enum { + INFERENCE_ENGINE_PRIVATE_TYPE_NONE = -1, + INFERENCE_ENGINE_PRIVATE_TYPE_NUM_OF_THREADS + } inference_engine_private_data_e; + /** * @brief Tensor defined by the dimension and their corresponding data * @details @a dimInfo is the information @@ -258,6 +263,17 @@ extern "C" // TODO. } inference_engine_capacity; + /** + * @brief A private structure for a backend engine. + * + * @details This structure is used to deliver inference engine specific private information, + * such as the number of threads in the case of tensorflow lite. + */ + typedef struct _inference_engine_private_data { + inference_engine_private_data_e data_type; + int32_t int_value; + } inference_engine_private_data; + #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/packaging/inference-engine-interface.spec b/packaging/inference-engine-interface.spec index 027d4bc..5d3f947 100644 --- a/packaging/inference-engine-interface.spec +++ b/packaging/inference-engine-interface.spec @@ -1,6 +1,6 @@ Name: inference-engine-interface Summary: Interface of inference engines -Version: 0.5.1 +Version: 0.5.2 Release: 0 Group: Multimedia/Framework License: Apache-2.0 diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp index 0d6b63e..b3ba4eb 100644 --- a/src/inference_engine_common_impl.cpp +++ b/src/inference_engine_common_impl.cpp @@ -537,6 +537,14 @@ namespace Common return ret; } + int InferenceEngineCommon::SetPrivateData(void *data) + { + CHECK_ENGINE_INSTANCE(mBackendHandle); + + return mBackendHandle->SetPrivateData(data); + } + + int InferenceEngineCommon::GetInputTensorBuffers(IETensorBuffer &buffers) { CHECK_ENGINE_INSTANCE(mBackendHandle);