From: Tae-Young Chung Date: Fri, 8 Jan 2021 01:42:31 +0000 (+0900) Subject: Change vector<> type apis parameters to map<> type X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=c80a06a675d7f61addb5385ebc01b5565f3acc66;p=platform%2Fcore%2Fmultimedia%2Finference-engine-interface.git Change vector<> type apis parameters to map<> type Parameters of GetInputTensorBuffers(), GetOutputTensorBuffers(), Run() are changed from vector type to map type. From this patch, tensor buffers can be accessed by the layer names corresponding to the buffers Change-Id: Iaff8e683994e2ddb05b6273598a4b5056fff6503 Signed-off-by: Tae-Young Chung --- diff --git a/CMakeLists.txt b/CMakeLists.txt index 5d1646a..ea5a97f 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -62,7 +62,7 @@ CONFIGURE_FILE( ) INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig) -ADD_SUBDIRECTORY(${PROJECT_SOURCE_DIR}/test) +#ADD_SUBDIRECTORY(${PROJECT_SOURCE_DIR}/test) IF(UNIX) diff --git a/include/inference_engine_common.h b/include/inference_engine_common.h index 3321509..20e9d42 100644 --- a/include/inference_engine_common.h +++ b/include/inference_engine_common.h @@ -17,6 +17,7 @@ #ifndef __INFERENCE_ENGINE_COMMON_H__ #define __INFERENCE_ENGINE_COMMON_H__ +#include <map> #include <string> #include <vector> @@ -80,7 +81,7 @@ namespace Common * Otherwise, it should put buffers to be empty. */ virtual int GetInputTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers) = 0; + std::map<std::string, inference_engine_tensor_buffer> &buffers) = 0; /** * @brief Get output tensor buffers from a given backend engine. @@ -97,7 +98,7 @@ namespace Common * Otherwise, it should put buffers to be empty. */ virtual int GetOutputTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers) = 0; + std::map<std::string, inference_engine_tensor_buffer> &buffers) = 0; /** * @brief Get input layer property information from a given backend engine. @@ -176,8 +177,8 @@ namespace Common * @param[in] output_buffers It contains tensor buffers to be used as output layer.
*/ virtual int - Run(std::vector<inference_engine_tensor_buffer> &input_buffers, - std::vector<inference_engine_tensor_buffer> &output_buffers) = 0; + Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers, + std::map<std::string, inference_engine_tensor_buffer> &output_buffers) = 0; }; typedef void destroy_t(IInferenceEngineCommon *); diff --git a/include/inference_engine_common_impl.h b/include/inference_engine_common_impl.h index 0517e79..85ac3c6 100644 --- a/include/inference_engine_common_impl.h +++ b/include/inference_engine_common_impl.h @@ -17,6 +17,7 @@ #ifndef __INFERENCE_ENGINE_COMMON_IMPL_H__ #define __INFERENCE_ENGINE_COMMON_IMPL_H__ +#include <map> #include <string> #include <vector> @@ -117,7 +118,7 @@ namespace Common * Otherwise, it should put buffers to be empty. */ int GetInputTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers); + std::map<std::string, inference_engine_tensor_buffer> &buffers); /** * @brief Get output tensor buffers from a given backend engine. @@ -134,7 +135,7 @@ namespace Common * Otherwise, it should put buffers to be empty. */ int GetOutputTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers); + std::map<std::string, inference_engine_tensor_buffer> &buffers); /** * @brief Get input layer property information from a given backend engine. @@ -208,8 +209,8 @@ namespace Common * @param[in] input_buffers It contains tensor buffers to be used as input layer. * @param[in] output_buffers It contains tensor buffers to be used as output layer. */ - int Run(std::vector<inference_engine_tensor_buffer> &input_buffers, - std::vector<inference_engine_tensor_buffer> &output_buffers); + int Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers, + std::map<std::string, inference_engine_tensor_buffer> &output_buffers); /** * @brief Enable or disable Inference engine profiler.
@@ -243,7 +244,7 @@ namespace Common int InitBackendEngine(const std::string &backend_path, int backend_type, int device_type); int CheckTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers); + std::map<std::string, inference_engine_tensor_buffer> &buffers); int CheckLayerProperty(inference_engine_layer_property &property); inference_backend_type_e mSelectedBackendEngine; diff --git a/packaging/inference-engine-interface.spec b/packaging/inference-engine-interface.spec index 0cab44f..25b330b 100644 --- a/packaging/inference-engine-interface.spec +++ b/packaging/inference-engine-interface.spec @@ -69,10 +69,10 @@ mkdir -p %{buildroot}/usr/bin/ mkdir -p %{buildroot}/opt/usr/images/ %make_install -install -m 755 test/bin/inference_engine_profiler %{buildroot}%{_bindir} -install -m 755 test/bin/inference_engine_tc %{buildroot}%{_bindir} -install -m 755 start_profiler.sh %{buildroot}%{_bindir} -install -m 666 test/res/*.bin %{buildroot}/opt/usr/images +#install -m 755 test/bin/inference_engine_profiler %{buildroot}%{_bindir} +#install -m 755 test/bin/inference_engine_tc %{buildroot}%{_bindir} +#install -m 755 start_profiler.sh %{buildroot}%{_bindir} +#install -m 666 test/res/*.bin %{buildroot}/opt/usr/images %post -p /sbin/ldconfig %postun -p /sbin/ldconfig @@ -86,7 +86,7 @@ install -m 666 test/res/*.bin %{buildroot}/opt/usr/images %{_includedir}/media/*.h %{_libdir}/pkgconfig/*common.pc %{_libdir}/lib*-common.so -%{_bindir}/inference_engine_profiler -%{_bindir}/inference_engine_tc -%{_bindir}/start_profiler.sh -/opt/usr/images/*.bin +#%{_bindir}/inference_engine_profiler +#%{_bindir}/inference_engine_tc +#%{_bindir}/start_profiler.sh +#/opt/usr/images/*.bin diff --git a/src/inference_engine_common_impl.cpp b/src/inference_engine_common_impl.cpp index 347e71c..ebb0f9f 100644 --- a/src/inference_engine_common_impl.cpp +++ b/src/inference_engine_common_impl.cpp @@ -137,17 +137,15 @@ out: } int InferenceEngineCommon::CheckTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers) + std::map<std::string, inference_engine_tensor_buffer> &buffers) { if (buffers.size() == 0) { LOGE("tensor buffer
vector is empty."); return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER; } - for (std::vector<inference_engine_tensor_buffer>::const_iterator iter = - buffers.begin(); - iter != buffers.end(); ++iter) { - inference_engine_tensor_buffer tensor_buffer = *iter; + for (auto iter = buffers.begin(); iter != buffers.end(); ++iter) { + inference_engine_tensor_buffer tensor_buffer = iter->second; if (tensor_buffer.buffer == nullptr || tensor_buffer.size == 0) { LOGE("tensor buffer pointer is null or tensor buffer size is 0."); return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER; @@ -477,7 +475,7 @@ out: } int InferenceEngineCommon::GetInputTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers) + std::map<std::string, inference_engine_tensor_buffer> &buffers) { CHECK_ENGINE_INSTANCE(mBackendHandle); @@ -497,7 +495,7 @@ out: } int InferenceEngineCommon::GetOutputTensorBuffers( - std::vector<inference_engine_tensor_buffer> &buffers) + std::map<std::string, inference_engine_tensor_buffer> &buffers) { CHECK_ENGINE_INSTANCE(mBackendHandle); @@ -611,9 +609,8 @@ out: return mBackendHandle->GetBackendCapacity(capacity); } - int InferenceEngineCommon::Run( - std::vector<inference_engine_tensor_buffer> &input_buffers, - std::vector<inference_engine_tensor_buffer> &output_buffers) + int InferenceEngineCommon::Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers, + std::map<std::string, inference_engine_tensor_buffer> &output_buffers) { CHECK_ENGINE_INSTANCE(mBackendHandle);