~InferenceEngineCommon();
+ /**
+ * @brief Set inference engine specific data.
+ * This function is called by upper framework to pass private data to inference engine
+ * such as number of threads for tensorflow lite engine backend.
+ *
+ * @param[in] data A pointer to inference_engine_private_data object.
+ * @return An integer status code propagated from the backend engine's
+ *         SetPrivateData(), or an error when no backend engine instance is
+ *         set. (NOTE(review): exact error values presumably follow the common
+ *         inference engine error enum — confirm against the implementation.)
+ */
+ int SetPrivateData(void *data);
+
/**
* @brief Load configuration information from a given ini file.
* If not given then /etc/inference/inference_engine_mlapi_backend.ini file will be used in default.
INFERENCE_ENGINE_CLTUNER_MAX
} inference_engine_cltuner_mode_e;
+ /**
+ * @brief Types of private data a backend engine can receive.
+ *
+ * @details Identifies which value is carried by inference_engine_private_data,
+ *          e.g. the number of threads for the tensorflow lite engine backend.
+ */
+ typedef enum {
+ INFERENCE_ENGINE_PRIVATE_TYPE_NONE = -1,
+ INFERENCE_ENGINE_PRIVATE_TYPE_NUM_OF_THREADS
+ } inference_engine_private_data_e;
+
/**
* @brief Tensor defined by the dimension and their corresponding data
* @details @a dimInfo is the information
// TODO.
} inference_engine_capacity;
+ /**
+ * @brief A private structure to a backend engine.
+ *
+ * @details This structure is used to deliver inference engine specific private information
+ * such as number of threads in case of tensorflow lite.
+ */
+ typedef struct _inference_engine_private_data {
+ inference_engine_private_data_e data_type; /**< Which private value is carried, e.g. INFERENCE_ENGINE_PRIVATE_TYPE_NUM_OF_THREADS. */
+ int32_t int_value; /**< Integer payload interpreted according to @a data_type (e.g. the thread count). */
+ } inference_engine_private_data;
+
#ifdef __cplusplus
}
#endif /* __cplusplus */
return ret;
}
+ int InferenceEngineCommon::SetPrivateData(void *data)
+ {
+ // Guard against a missing backend engine instance — presumably returns an
+ // error when mBackendHandle is unset, matching its use elsewhere in this file.
+ CHECK_ENGINE_INSTANCE(mBackendHandle);
+
+ // Delegate to the backend, which interprets the
+ // inference_engine_private_data payload (data_type + int_value).
+ return mBackendHandle->SetPrivateData(data);
+ }
+
+
int InferenceEngineCommon::GetInputTensorBuffers(IETensorBuffer &buffers)
{
CHECK_ENGINE_INSTANCE(mBackendHandle);