Add inference engine private data support 67/316167/6
authorInki Dae <inki.dae@samsung.com>
Wed, 11 Dec 2024 06:59:52 +0000 (15:59 +0900)
committerInki Dae <inki.dae@samsung.com>
Mon, 23 Dec 2024 00:11:23 +0000 (09:11 +0900)
[Version] : 0.5.2
[Issue type] : new feature

Add inference engine private data support. To support this, this patch adds
a new enumeration and structure to the inference_engine_type.h header file
for managing private data in an inference engine.

With this patch, we can deliver inference engine specific data to
each inference engine backend.

Change-Id: I2cbb015aa4262315982adcc5c6119ada7cad543f
Signed-off-by: Inki Dae <inki.dae@samsung.com>
include/inference_engine_common_impl.h
include/inference_engine_type.h
packaging/inference-engine-interface.spec
src/inference_engine_common_impl.cpp

index 97f28885d1eb9a1d6e4f4849d9de3911e734ffce..66c471451c2ff3fa436c3aaf1bb8a2f245fdd996 100644 (file)
@@ -39,6 +39,15 @@ namespace Common
 
                ~InferenceEngineCommon();
 
+               /**
+                * @brief Set inference engine specific data.
+                *        This function is called by the upper framework to pass private data to the
+                *        inference engine, such as the number of threads for the tensorflow lite engine backend.
+                *
+                * @param[in] data A pointer to an inference_engine_private_data object.
+                */
+               int SetPrivateData(void *data);
+
                /**
                 * @brief Load configuration information from a given ini file.
                 *        If not given then /etc/inference/inference_engine_mlapi_backend.ini file will be used in default.
index 6c5d1e5ec7c49aebe061e99469c147bc3dbbd7b2..19cfc7a2c03dcc853660a6b284fcc5a603965b63 100644 (file)
@@ -139,6 +139,11 @@ extern "C"
                INFERENCE_ENGINE_CLTUNER_MAX
        } inference_engine_cltuner_mode_e;
 
+       typedef enum {
+               INFERENCE_ENGINE_PRIVATE_TYPE_NONE = -1,
+               INFERENCE_ENGINE_PRIVATE_TYPE_NUM_OF_THREADS
+       } inference_engine_private_data_e;
+
        /**
         * @brief Tensor defined by the dimension and their corresponding data
         * @details @a dimInfo is the information
@@ -258,6 +263,17 @@ extern "C"
                // TODO.
        } inference_engine_capacity;
 
+       /**
+        * @brief A private structure to a backend engine.
+        *
+        * @details This structure is used to deliver inference engine specific private information
+        *          such as number of threads in case of tensorflow lite.
+        */
+       typedef struct _inference_engine_private_data {
+               inference_engine_private_data_e data_type;
+               int32_t int_value;
+       } inference_engine_private_data;
+
 #ifdef __cplusplus
 }
 #endif /* __cplusplus */
index 027d4bc9876fc434cf610acab6366f3b3808d9d4..5d3f9471350e05478d5eb49e0fd57c2b758beade 100644 (file)
@@ -1,6 +1,6 @@
 Name:        inference-engine-interface
 Summary:     Interface of inference engines
-Version:     0.5.1
+Version:     0.5.2
 Release:     0
 Group:       Multimedia/Framework
 License:     Apache-2.0
index 0d6b63e81ea41eab48711a1f9f94bb75a9dc64b3..b3ba4eb7c154df44ddff3fe9983575895e9846be 100644 (file)
@@ -537,6 +537,14 @@ namespace Common
                return ret;
        }
 
+       int InferenceEngineCommon::SetPrivateData(void *data)
+       {
+               CHECK_ENGINE_INSTANCE(mBackendHandle);
+
+               return mBackendHandle->SetPrivateData(data);
+       }
+
+
        int InferenceEngineCommon::GetInputTensorBuffers(IETensorBuffer &buffers)
        {
                CHECK_ENGINE_INSTANCE(mBackendHandle);