Change vector<> type API parameters to map<> type
author Tae-Young Chung <ty83.chung@samsung.com>
Fri, 8 Jan 2021 01:42:31 +0000 (10:42 +0900)
committer Tae-Young Chung <ty83.chung@samsung.com>
Fri, 8 Jan 2021 01:42:36 +0000 (10:42 +0900)
Parameters of GetInputTensorBuffers(), GetOutputTensorBuffers(), and Run()
are changed from vector<inference_engine_tensor_buffer> type to
map<string, inference_engine_tensor_buffer> type.

From this patch, tensor buffers can be accessed by the layer names
corresponding to the buffers.
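
For illustration, a minimal caller-side sketch of the new interface
(the engine setup and the layer name "input" are assumptions made for
this example; only the map-based signatures come from this patch):

    #include <map>
    #include <string>
    #include "inference_engine_common_impl.h"

    using namespace InferenceEngineInterface::Common;

    // 'engine' is assumed to be an already-configured
    // InferenceEngineCommon instance with a model loaded;
    // "input" is a hypothetical layer name.
    int RunOnce(InferenceEngineCommon &engine)
    {
        std::map<std::string, inference_engine_tensor_buffer> inputs;
        std::map<std::string, inference_engine_tensor_buffer> outputs;

        int ret = engine.GetInputTensorBuffers(inputs);
        if (ret != INFERENCE_ENGINE_ERROR_NONE)
            return ret;

        // Buffers are now keyed by layer name instead of vector index.
        inference_engine_tensor_buffer &input_buf = inputs["input"];
        // ... fill input_buf.buffer with up to input_buf.size bytes ...

        ret = engine.GetOutputTensorBuffers(outputs);
        if (ret != INFERENCE_ENGINE_ERROR_NONE)
            return ret;

        return engine.Run(inputs, outputs);
    }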

Change-Id: Iaff8e683994e2ddb05b6273598a4b5056fff6503
Signed-off-by: Tae-Young Chung <ty83.chung@samsung.com>
CMakeLists.txt
include/inference_engine_common.h
include/inference_engine_common_impl.h
packaging/inference-engine-interface.spec
src/inference_engine_common_impl.cpp

index 5d1646ad27adf9248dbad128b2dc204c254674d9..ea5a97fbc49aedb3e988005ddbbc2625bc307a0b 100644 (file)
@@ -62,7 +62,7 @@ CONFIGURE_FILE(
 )
 INSTALL(FILES ${CMAKE_CURRENT_SOURCE_DIR}/${fw_name_common}.pc DESTINATION ${LIB_INSTALL_DIR}/pkgconfig)
 
-ADD_SUBDIRECTORY(${PROJECT_SOURCE_DIR}/test)
+#ADD_SUBDIRECTORY(${PROJECT_SOURCE_DIR}/test)
 
 IF(UNIX)
 
index 3321509c6beb2e91369d4415e45a9a9c33b8992a..20e9d42eeec156b36f953e49156ba31ebfb774ec 100644 (file)
@@ -17,6 +17,7 @@
 #ifndef __INFERENCE_ENGINE_COMMON_H__
 #define __INFERENCE_ENGINE_COMMON_H__
 
+#include <map>
 #include <vector>
 #include <string>
 
@@ -80,7 +81,7 @@ namespace Common
                 *              Otherwise, it should put buffers to be empty.
                 */
                virtual int GetInputTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers) = 0;
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers) = 0;
 
                /**
                 * @brief Get output tensor buffers from a given backend engine.
@@ -97,7 +98,7 @@ namespace Common
                 *              Otherwise, it should put buffers to be empty.
                 */
                virtual int GetOutputTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers) = 0;
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers) = 0;
 
                /**
                 * @brief Get input layer property information from a given backend engine.
@@ -176,8 +177,8 @@ namespace Common
                 * @param[in] output_buffers It contains tensor buffers to be used as output layer.
                 */
                virtual int
-               Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-                       std::vector<inference_engine_tensor_buffer> &output_buffers) = 0;
+               Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
+                       std::map<std::string, inference_engine_tensor_buffer> &output_buffers) = 0;
        };
 
        typedef void destroy_t(IInferenceEngineCommon *);
index 0517e799649a693e799b4378a19cf962961b7572..85ac3c6626e2f0348c449b8a4ddd90f5d11810d2 100644 (file)
@@ -17,6 +17,7 @@
 #ifndef __INFERENCE_ENGINE_COMMON_IMPL_H__
 #define __INFERENCE_ENGINE_COMMON_IMPL_H__
 
+#include <map>
 #include <vector>
 #include <string>
 
@@ -117,7 +118,7 @@ namespace Common
                 *              Otherwise, it should put buffers to be empty.
                 */
                int GetInputTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers);
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers);
 
                /**
                 * @brief Get output tensor buffers from a given backend engine.
@@ -134,7 +135,7 @@ namespace Common
                 *              Otherwise, it should put buffers to be empty.
                 */
                int GetOutputTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers);
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers);
 
                /**
                 * @brief Get input layer property information from a given backend engine.
@@ -208,8 +209,8 @@ namespace Common
                 * @param[in] input_buffers It contains tensor buffers to be used as input layer.
                 * @param[in] output_buffers It contains tensor buffers to be used as output layer.
                 */
-               int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-                               std::vector<inference_engine_tensor_buffer> &output_buffers);
+               int Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
+                               std::map<std::string, inference_engine_tensor_buffer> &output_buffers);
 
                /**
                 * @brief Enable or disable Inference engine profiler.
@@ -243,7 +244,7 @@ namespace Common
                int InitBackendEngine(const std::string &backend_path,
                                                          int backend_type, int device_type);
                int CheckTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers);
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers);
                int CheckLayerProperty(inference_engine_layer_property &property);
 
                inference_backend_type_e mSelectedBackendEngine;
index 0cab44f4e20001f1bce63f05c35ae37cc10246f9..25b330b08eac4e1146d4e0e3a5a8098dc448a2f4 100644 (file)
@@ -69,10 +69,10 @@ mkdir -p %{buildroot}/usr/bin/
 mkdir -p %{buildroot}/opt/usr/images/
 %make_install
 
-install -m 755 test/bin/inference_engine_profiler %{buildroot}%{_bindir}
-install -m 755 test/bin/inference_engine_tc %{buildroot}%{_bindir}
-install -m 755 start_profiler.sh %{buildroot}%{_bindir}
-install -m 666 test/res/*.bin %{buildroot}/opt/usr/images
+#install -m 755 test/bin/inference_engine_profiler %{buildroot}%{_bindir}
+#install -m 755 test/bin/inference_engine_tc %{buildroot}%{_bindir}
+#install -m 755 start_profiler.sh %{buildroot}%{_bindir}
+#install -m 666 test/res/*.bin %{buildroot}/opt/usr/images
 
 %post -p /sbin/ldconfig
 %postun -p /sbin/ldconfig
@@ -86,7 +86,7 @@ install -m 666 test/res/*.bin %{buildroot}/opt/usr/images
 %{_includedir}/media/*.h
 %{_libdir}/pkgconfig/*common.pc
 %{_libdir}/lib*-common.so
-%{_bindir}/inference_engine_profiler
-%{_bindir}/inference_engine_tc
-%{_bindir}/start_profiler.sh
-/opt/usr/images/*.bin
+#%{_bindir}/inference_engine_profiler
+#%{_bindir}/inference_engine_tc
+#%{_bindir}/start_profiler.sh
+#/opt/usr/images/*.bin
index 347e71c41c4a8e54fa5dafa582349d3ff5e4677a..ebb0f9fb526d3f7b62a25d4da147f56907170c97 100644 (file)
@@ -137,17 +137,15 @@ out:
        }
 
        int InferenceEngineCommon::CheckTensorBuffers(
-                       std::vector<inference_engine_tensor_buffer> &buffers)
+                       std::map<std::string, inference_engine_tensor_buffer> &buffers)
        {
                if (buffers.size() == 0) {
                        LOGE("tensor buffer vector is empty.");
                        return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
                }
 
-               for (std::vector<inference_engine_tensor_buffer>::const_iterator iter =
-                                        buffers.begin();
-                        iter != buffers.end(); ++iter) {
-                       inference_engine_tensor_buffer tensor_buffer = *iter;
+               for (auto iter = buffers.begin(); iter != buffers.end(); ++iter) {
+                       inference_engine_tensor_buffer tensor_buffer = iter->second;
                        if (tensor_buffer.buffer == nullptr || tensor_buffer.size == 0) {
                                LOGE("tensor buffer pointer is null or tensor buffer size is 0.");
                                return INFERENCE_ENGINE_ERROR_INVALID_PARAMETER;
@@ -477,7 +475,7 @@ out:
        }
 
        int InferenceEngineCommon::GetInputTensorBuffers(
-                       std::vector<inference_engine_tensor_buffer> &buffers)
+                       std::map<std::string, inference_engine_tensor_buffer> &buffers)
        {
                CHECK_ENGINE_INSTANCE(mBackendHandle);
 
@@ -497,7 +495,7 @@ out:
        }
 
        int InferenceEngineCommon::GetOutputTensorBuffers(
-                       std::vector<inference_engine_tensor_buffer> &buffers)
+                       std::map<std::string, inference_engine_tensor_buffer> &buffers)
        {
                CHECK_ENGINE_INSTANCE(mBackendHandle);
 
@@ -611,9 +609,8 @@ out:
                return mBackendHandle->GetBackendCapacity(capacity);
        }
 
-       int InferenceEngineCommon::Run(
-                       std::vector<inference_engine_tensor_buffer> &input_buffers,
-                       std::vector<inference_engine_tensor_buffer> &output_buffers)
+       int InferenceEngineCommon::Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
+                       std::map<std::string, inference_engine_tensor_buffer> &output_buffers)
        {
                CHECK_ENGINE_INSTANCE(mBackendHandle);