From: Inki Dae
Date: Thu, 13 Feb 2020 07:35:11 +0000 (+0900)
Subject: Pass input tensor buffers coming from Inference layer to backend engine
X-Git-Tag: submit/tizen/20200423.063253~58
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=8e04c2854d646d53ffea8a048a7a54688b6f5175;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git

Pass input tensor buffers coming from Inference layer to backend engine

This patch makes the InferenceEngineCommon layer pass input tensor buffers
allocated by the Inference layer to a backend engine for inference. To do
so, it adds several new callbacks and drops unnecessary ones.
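The intended call flow on the caller side looks roughly like the sketch
below. This is illustrative only: "engine" stands for a bound
InferenceEngineCommon instance, and FillWithPreprocessedData() is a
hypothetical stand-in for the Inference layer's preprocessing, not part
of this patch.

    std::vector<inference_engine_tensor_buffer> inputs;

    // Ask the backend engine for the input tensor buffers it expects
    // to be filled for the inference.
    int ret = engine->GetInputTensorBuffers(inputs);
    if (ret != INFERENCE_ENGINE_ERROR_NONE)
        return ret;

    // The Inference layer writes preprocessed data into each buffer.
    for (auto &input : inputs)
        FillWithPreprocessedData(input);

    // Hand the filled input tensor buffers back to the backend engine.
    ret = engine->Run(inputs);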
Change-Id: I03fd9aa9ce73cb2c1092a5a50d4147ff90e4462e
Signed-off-by: Inki Dae
---

diff --git a/common/inference_engine_common_impl.cpp b/common/inference_engine_common_impl.cpp
index 503ecc1..cd7edcd 100755
--- a/common/inference_engine_common_impl.cpp
+++ b/common/inference_engine_common_impl.cpp
@@ -143,16 +143,6 @@ void InferenceEngineCommon::UnbindBackend(void)
 	LOGW("LEAVE");
 }
 
-int InferenceEngineCommon::SetOutputTensorParamNodes(std::vector<std::string> nodes)
-{
-	LOGI("ENTER");
-	int ret = engine->SetOutputTensorParamNodes(nodes);
-	if (ret != INFERENCE_ENGINE_ERROR_NONE)
-		LOGE("Fail to SetOutputTensorParamNodes");
-	LOGI("LEAVE");
-	return ret;
-}
-
 int InferenceEngineCommon::SetTargetDevices(int types)
 {
 	int ret = engine->SetTargetDevices(types);
@@ -170,44 +160,24 @@ int InferenceEngineCommon::Load(std::vector<std::string> model_paths, inference_model_format_e model_format)
 	if (ret != INFERENCE_ENGINE_ERROR_NONE)
 		LOGE("Fail to load InferenceEngineVision");
 
-	ret = engine->CreateInputLayerPassage();
-	if (ret != INFERENCE_ENGINE_ERROR_NONE)
-		LOGE("Fail to load CreateInputLayerPassage");
-
 	LOGI("LEAVE");
 
 	return INFERENCE_ENGINE_ERROR_NONE;
 }
 
-int InferenceEngineCommon::GetInputLayerAttrType()
-{
-	return engine->GetInputLayerAttrType();
-}
-
-void * InferenceEngineCommon::GetInputDataPtr()
+int InferenceEngineCommon::GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers)
 {
-	return engine->GetInputDataPtr();
+	return engine->GetInputTensorBuffers(buffers);
 }
 
-int InferenceEngineCommon::GetInputTensorProperty(inference_engine_layer_property *property)
+int InferenceEngineCommon::GetInputLayerProperty(inference_engine_layer_property &property)
 {
-	LOGI("ENTER");
-	LOGI("LEAVE");
-
-	return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::GetOutputTensorProperty(inference_engine_layer_property *property)
-{
-	LOGI("ENTER");
-	LOGI("LEAVE");
-
-	return INFERENCE_ENGINE_ERROR_NONE;
+	return engine->GetInputLayerProperty(property);
 }
 
-int InferenceEngineCommon::SetInputDataBuffer(tensor_t data)
+int InferenceEngineCommon::GetOutputLayerProperty(inference_engine_layer_property &property)
 {
-	return engine->SetInputDataBuffer(data);
+	return engine->GetOutputLayerProperty(property);
 }
 
 int InferenceEngineCommon::SetInputTensorProperty(inference_engine_layer_property &property)
@@ -231,42 +201,9 @@ int InferenceEngineCommon::GetBackendCapacity(inference_engine_capacity *capacity)
 	return engine->GetBackendCapacity(capacity);
 }
 
-int InferenceEngineCommon::Run()
+int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers)
 {
-	int ret = engine->Run();
-	if (ret != INFERENCE_ENGINE_ERROR_NONE)
-		LOGE("Fail to run InferenceEngineCommon");
-
-	return ret;
-}
-
-int InferenceEngineCommon::Run(std::vector tensor)
-{
-	int ret = engine->Run(tensor);
-	if (ret != INFERENCE_ENGINE_ERROR_NONE)
-		LOGE("Fail to run InferenceEngineCommon");
-
-	return ret;
-}
-
-
-int InferenceEngineCommon::Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-		std::vector<inference_engine_tensor_buffer> &output_buffers)
-{
-	LOGI("ENTER");
-	LOGI("LEAVE");
-
-	return INFERENCE_ENGINE_ERROR_NONE;
-}
-
-int InferenceEngineCommon::SetInputTensorParamNode(std::string node)
-{
-	LOGE("ENTER");
-	int ret = engine->SetInputTensorParamNode(node);
-	if (ret != INFERENCE_ENGINE_ERROR_NONE)
-		LOGE("Fail to SetInputTensorParamNode");
-	LOGE("LEAVE");
-	return ret;
+	return engine->Run(input_buffers);
 }
 
 int InferenceEngineCommon::GetInferenceResult(tensor_t& results)
diff --git a/include/inference_engine_common.h b/include/inference_engine_common.h
index 886d431..ed47a1c 100755
--- a/include/inference_engine_common.h
+++ b/include/inference_engine_common.h
@@ -30,20 +30,6 @@ public:
 
 	virtual ~IInferenceEngineCommon() {};
 
-	/**
-	 * @brief Set an input node name. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	virtual int SetInputTensorParamNode(std::string node) = 0;
-
-	/**
-	 * @brief Set output nodes' names. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	virtual int SetOutputTensorParamNodes(std::vector<std::string> nodes) = 0;
-
 	/**
 	 * @brief Set target devices.
 	 * @details See #inference_target_type_e
@@ -60,46 +46,25 @@ public:
 	virtual int Load(std::vector<std::string> model_paths, inference_model_format_e model_format) { return 0; }
 
 	/**
-	 * @brief Create a memory. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	virtual int CreateInputLayerPassage() = 0;
-
-	/**
-	 * @brief Get an input layer's type such as float32, float16, and so on. Deprecated.
+	 * @brief Get input tensor buffers from a given backend engine.
 	 *
-	 * @since_tizen 5.5
-	 */
-	virtual int GetInputLayerAttrType() = 0;
-
-	/**
-	 * @brief Get an input data pointer. Deprecated.
-	 *
-	 * @since_tizen 5.5
+	 * @since_tizen 6.0
 	 */
-	virtual void* GetInputDataPtr() = 0;
+	virtual int GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers) = 0;
 
 	/**
 	 * @brief Get input layer property information from a given backend engine.
 	 *
 	 * @since_tizen 6.0
 	 */
-	virtual int GetInputTensorProperty(inference_engine_layer_property *property) { return 0; }
+	virtual int GetInputLayerProperty(inference_engine_layer_property &property) = 0;
 
 	/**
 	 * @brief Get output layer property information from a given backend engine.
 	 *
 	 * @since_tizen 6.0
 	 */
-	virtual int GetOutputTensorProperty(inference_engine_layer_property *property) { return 0; }
-
-	/**
-	 * @brief Set an input data buffer. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	virtual int SetInputDataBuffer(tensor_t data) = 0;
+	virtual int GetOutputLayerProperty(inference_engine_layer_property &property) = 0;
 
 	/**
 	 * @brief Set input layer property information to a given backend engine.
@@ -123,21 +88,11 @@ public:
 	virtual int GetBackendCapacity(inference_engine_capacity *capacity) = 0;
 
 	/**
-	 * @brief Run an inference. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	virtual int Run() = 0;
-
-	virtual int Run(std::vector tensor) = 0;
-
-	/**
-	 * @brief Run an inference with user-given input and output buffers.
+	 * @brief Run an inference with user-given input buffers.
 	 *
 	 * @since_tizen 6.0
 	 */
-	virtual int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-			std::vector<inference_engine_tensor_buffer> &output_buffers) { return 0; }
+	virtual int Run(std::vector<inference_engine_tensor_buffer> &input_buffers) = 0;
 
 	/**
 	 * @brief Get inference results. Deprecated.
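With the interface above, a backend author now implements the two new
entry points roughly as follows. This is a hypothetical minimal stub,
not code from this patch: the class name and member are invented, the
remaining pure virtuals are omitted, and a real backend would wrap its
framework's own tensor objects.

    class StubEngine : public IInferenceEngineCommon {
    public:
        // Hand out descriptors for the input tensors this backend
        // allocated while loading the model.
        int GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers) override
        {
            buffers = mInputBuffers;
            return INFERENCE_ENGINE_ERROR_NONE;
        }

        // Consume the caller-filled input buffers and run the network.
        int Run(std::vector<inference_engine_tensor_buffer> &input_buffers) override
        {
            // ... feed input_buffers to the underlying framework ...
            return INFERENCE_ENGINE_ERROR_NONE;
        }

        // ... other IInferenceEngineCommon pure virtuals omitted ...

    private:
        std::vector<inference_engine_tensor_buffer> mInputBuffers;
    };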
diff --git a/include/inference_engine_common_impl.h b/include/inference_engine_common_impl.h
index a299205..5bfe3d8 100755
--- a/include/inference_engine_common_impl.h
+++ b/include/inference_engine_common_impl.h
@@ -59,20 +59,6 @@ public:
 
 	void UnbindBackend(void);
 
-	/**
-	 * @brief Set input node name. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	int SetInputTensorParamNode(std::string node);
-
-	/**
-	 * @brief Set output nodes' names. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	int SetOutputTensorParamNodes(std::vector<std::string> nodes);
-
 	/**
 	 * @brief Set target devices.
 	 * @details See #inference_target_type_e
@@ -89,39 +75,25 @@ public:
 	int Load(std::vector<std::string> model_paths, inference_model_format_e model_format);
 
 	/**
-	 * @brief Get an input layer's type such as float32, float16, and so on. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	int GetInputLayerAttrType();
-
-	/**
-	 * @brief Get an input data pointer. Deprecated.
+	 * @brief Get input tensor buffers.
 	 *
-	 * @since_tizen 5.5
+	 * @since_tizen 6.0
 	 */
-	void* GetInputDataPtr();
+	int GetInputTensorBuffers(std::vector<inference_engine_tensor_buffer> &buffers);
 
 	/**
 	 * @brief Get an input layer property information from a given backend engine.
 	 *
 	 * @since_tizen 6.0
 	 */
-	int GetInputTensorProperty(inference_engine_layer_property *property);
+	int GetInputLayerProperty(inference_engine_layer_property &property);
 
 	/**
 	 * @brief Get an output layer property information from a given backend engine.
 	 *
 	 * @since_tizen 6.0
 	 */
-	int GetOutputTensorProperty(inference_engine_layer_property *property);
-
-	/**
-	 * @brief Set an input data buffer. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	int SetInputDataBuffer(tensor_t data);
+	int GetOutputLayerProperty(inference_engine_layer_property &property);
 
 	/**
 	 * @brief Set an input layer property information to a given backend engine.
@@ -144,22 +116,12 @@ public:
 	 */
 	int GetBackendCapacity(inference_engine_capacity *capacity);
 
-	/**
-	 * @brief Run an inference. Deprecated.
-	 *
-	 * @since_tizen 5.5
-	 */
-	int Run();
-
-	int Run(std::vector tensor);
-
 	/**
 	 * @brief Run an inference with user-given input and output buffers.
 	 *
 	 * @since_tizen 6.0
 	 */
-	int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-		std::vector<inference_engine_tensor_buffer> &output_buffers);
+	int Run(std::vector<inference_engine_tensor_buffer> &input_buffers);
 
 	/**
 	 * @brief Get inference results.
diff --git a/include/inference_engine_type.h b/include/inference_engine_type.h
index 925605d..8f0c9fa 100644
--- a/include/inference_engine_type.h
+++ b/include/inference_engine_type.h
@@ -74,9 +74,9 @@ typedef enum {
  *
  */
 typedef enum {
-	TENSOR_SHAPE_NCHW = 0, /**< tensor order is batch size, number of channel, height, width. */
-	TENSOR_SHAPE_NHWC, /**< tensor order is batch size, height, width, number of channel. */
-} inference_tensor_shape_e;
+	TENSOR_SHAPE_NCHW = 0, /**< tensor order is batch size, number of channels, height, width. */
+	TENSOR_SHAPE_NHWC, /**< tensor order is batch size, height, width, number of channels. */
+} inference_tensor_shape_type_e;
 
 /**
  * @brief Enumeration for tensor data type.
@@ -89,7 +89,7 @@ typedef enum {
 	TENSOR_DATA_TYPE_FLOAT32,
 	TENSOR_DATA_TYPE_UINT8,
 	TENSOR_DATA_TYPE_UINT16,
-	TENSOR_DATA_TYPE_UINT32,
+	TENSOR_DATA_TYPE_UINT32
 } inference_tensor_data_type_e;
 
 #define INFERENCE_TARGET_MASK (INFERENCE_TARGET_CPU | INFERENCE_TARGET_GPU | INFERENCE_TARGET_CUSTOM)
@@ -154,9 +154,10 @@ typedef struct _inference_engine_tensor_buffer {
  * @since_tizen 6.0
 */
 typedef struct _inference_engine_tensor_info {
-	std::string layer_name; /**< a given layer's name */
-	std::vector<int> tensor_shape; /**< a tensor shape of the layer. */
-	inference_tensor_data_type_e tensor_type; /**< a tensor type of the layer. */
+	std::vector<size_t> shape; /**< a tensor shape. */
+	inference_tensor_shape_type_e shape_type; /**< a tensor shape type such as NCHW or NHWC. */
+	inference_tensor_data_type_e data_type; /**< a tensor data type of the layer. */
+	size_t size; /**< a tensor buffer size. */
 	// TODO.
 } inference_engine_tensor_info;
 
@@ -185,7 +186,7 @@ typedef struct _inference_engine_layer_property {
  */
 typedef struct _inference_engine_capacity {
 	int supported_accel_devices;
-	inference_tensor_shape_e supported_tensor_shape;
+	inference_tensor_shape_type_e supported_tensor_shape_type;
 	std::vector<std::string> supported_nn_models;
 	// TODO.
 } inference_engine_capacity;
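One consequence of the new inference_engine_tensor_info layout is that
its size member should agree with shape and data_type. A sketch of the
arithmetic, assuming only the four data types declared above and a
hypothetical helper name:

    size_t TensorSizeInBytes(const inference_engine_tensor_info &info)
    {
        size_t bytes_per_element;

        switch (info.data_type) {
        case TENSOR_DATA_TYPE_UINT8:   bytes_per_element = 1; break;
        case TENSOR_DATA_TYPE_UINT16:  bytes_per_element = 2; break;
        case TENSOR_DATA_TYPE_UINT32:
        case TENSOR_DATA_TYPE_FLOAT32: bytes_per_element = 4; break;
        default:                       bytes_per_element = 4; break;
        }

        // e.g. an NCHW shape of {1, 3, 224, 224} yields 150528 elements.
        size_t elements = 1;
        for (size_t dim : info.shape)
            elements *= dim;

        return elements * bytes_per_element;
    }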