Change members of inference_engine_layer_property structure
[platform/core/multimedia/inference-engine-mlapi.git] / src/inference_engine_mlapi_private.h
index 5f9fa32..fe39594 100644
@@ -51,10 +51,10 @@ namespace MLAPIImpl
                                 inference_model_format_e model_format) override;
 
                int GetInputTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers) override;
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers) override;
 
                int GetOutputTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &buffers) override;
+                               std::map<std::string, inference_engine_tensor_buffer> &buffers) override;
 
                int GetInputLayerProperty(
                                inference_engine_layer_property &property) override;
@@ -70,14 +70,14 @@ namespace MLAPIImpl
 
                int GetBackendCapacity(inference_engine_capacity *capacity) override;
 
-               int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
-                               std::vector<inference_engine_tensor_buffer> &output_buffers)
+               int Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
+                               std::map<std::string, inference_engine_tensor_buffer> &output_buffers)
                                override;
 
        private:
                int CheckTensorBuffers(
-                               std::vector<inference_engine_tensor_buffer> &input_buffers,
-                               std::vector<inference_engine_tensor_buffer> &output_buffers);
+                               std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
+                               std::map<std::string, inference_engine_tensor_buffer> &output_buffers);
                int ConvertTensorType(int tensor_type);
                int UpdateTensorsInfo();
 
@@ -88,8 +88,8 @@ namespace MLAPIImpl
                ml_tensors_info_h mOutputInfoHandle;
                ml_tensors_data_h mInputDataHandle;
                ml_tensors_data_h mOutputDataHandle;
-               std::vector<std::string> mDesignated_inputs;
-               std::vector<std::string> mDesignated_outputs;
+               std::map<std::string, int> mDesignated_inputs;
+               std::map<std::string, int> mDesignated_outputs;
                inference_engine_layer_property mInputProperty;
                inference_engine_layer_property mOutputProperty;
        };
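mDesignated_inputs and mDesignated_outputs now pair each requested layer name with an integer, presumably the tensor's index within the corresponding tensors-info handle. The sketch below shows one way such a name-to-index map could be built and queried; the helper name and variables are hypothetical and not taken from this repository.

```cpp
#include <map>
#include <string>
#include <vector>

std::map<std::string, int> BuildDesignatedLayers(const std::vector<std::string> &layer_names)
{
	std::map<std::string, int> designated;
	int index = 0;

	// Each requested layer name keeps the tensor index it maps to, so later
	// lookups (e.g. when binding a name-keyed tensor buffer) go by name
	// instead of scanning a vector for a matching position.
	for (const auto &name : layer_names)
		designated.insert(std::make_pair(name, index++));

	return designated;
}
```

A lookup such as `designated.at("input_tensor")` then recovers the tensor index for a given layer name.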