Change members of the inference_engine_layer_property structure.
[platform/core/multimedia/inference-engine-mlapi.git] / src / inference_engine_mlapi_private.h
1 /**
2  * Copyright (c) 2020 Samsung Electronics Co., Ltd All Rights Reserved
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16
17 #ifndef __INFERENCE_ENGINE_NNSTREAMER_PRIVATE_H__
18 #define __INFERENCE_ENGINE_NNSTREAMER_PRIVATE_H__
19
20 #include <inference_engine_common.h>
21 #include <nnstreamer-single.h>
22
23 #include <memory>
24 #include <dlog.h>
25
26 #ifdef LOG_TAG
27 #undef LOG_TAG
28 #endif
29
30 #define LOG_TAG "INFERENCE_ENGINE_MLAPI"
31
32 using namespace InferenceEngineInterface::Common;
33
namespace InferenceEngineImpl
{
namespace MLAPIImpl
{
	/**
	 * @brief Inference engine backend built on the ML API single-shot
	 *        interface (nnstreamer-single.h).
	 *
	 * Implements the common IInferenceEngineCommon interface so that the
	 * inference-engine framework can run models through the ML API
	 * (NNStreamer) single-shot handle (ml_single_h).
	 */
	class InferenceMLAPI : public IInferenceEngineCommon
	{
	public:
		InferenceMLAPI();
		// NOTE(review): not declared virtual here; safe only if
		// IInferenceEngineCommon declares a virtual destructor — confirm.
		~InferenceMLAPI();

		/** @brief Passes backend-specific private data from the framework. */
		int SetPrivateData(void *data) override;

		/** @brief Selects the target device(s) (CPU/GPU/NPU bitmask) to run on. */
		int SetTargetDevices(int types) override;

		/** @brief Configures OpenCL tuner options; final — subclasses may not override. */
		int SetCLTuner(const inference_engine_cltuner *cltuner) final;

		/**
		 * @brief Loads model files and prepares the single-shot handle.
		 *
		 * @param model_paths  Paths to the model file(s) to load.
		 * @param model_format Format of the given model (e.g. TFLite, NNFW).
		 */
		int Load(std::vector<std::string> model_paths,
				 inference_model_format_e model_format) override;

		/** @brief Fills @a buffers with input tensor buffers keyed by layer name. */
		int GetInputTensorBuffers(
				std::map<std::string, inference_engine_tensor_buffer> &buffers) override;

		/** @brief Fills @a buffers with output tensor buffers keyed by layer name. */
		int GetOutputTensorBuffers(
				std::map<std::string, inference_engine_tensor_buffer> &buffers) override;

		/** @brief Reports the input layer property (names, tensor info). */
		int GetInputLayerProperty(
				inference_engine_layer_property &property) override;

		/** @brief Reports the output layer property (names, tensor info). */
		int GetOutputLayerProperty(
				inference_engine_layer_property &property) override;

		/** @brief Sets the input layer property requested by the caller. */
		int SetInputLayerProperty(
				inference_engine_layer_property &property) override;

		/** @brief Sets the output layer property requested by the caller. */
		int SetOutputLayerProperty(
				inference_engine_layer_property &property) override;

		/** @brief Reports capabilities (e.g. supported devices) of this backend. */
		int GetBackendCapacity(inference_engine_capacity *capacity) override;

		/**
		 * @brief Runs one inference with the given input buffers and writes
		 *        results into the given output buffers.
		 */
		int Run(std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
				std::map<std::string, inference_engine_tensor_buffer> &output_buffers)
				override;

	private:
		// Validates that the given input/output buffer maps are usable for Run().
		int CheckTensorBuffers(
				std::map<std::string, inference_engine_tensor_buffer> &input_buffers,
				std::map<std::string, inference_engine_tensor_buffer> &output_buffers);
		// Maps a framework tensor type to the corresponding ML API tensor type.
		int ConvertTensorType(int tensor_type);
		// Refreshes cached tensor info handles from the single-shot handle.
		int UpdateTensorsInfo();

		int mPluginType;            // Which ML API plugin/backend is in use.
		int mTargetDevice;          // Device bitmask chosen via SetTargetDevices().
		ml_single_h mSingle;        // ML API single-shot inference handle.
		ml_tensors_info_h mInputInfoHandle;   // Cached input tensors metadata.
		ml_tensors_info_h mOutputInfoHandle;  // Cached output tensors metadata.
		ml_tensors_data_h mInputDataHandle;   // Reusable input data handle.
		ml_tensors_data_h mOutputDataHandle;  // Reusable output data handle.
		// Layer name -> index maps for user-designated input/output layers.
		std::map<std::string, int> mDesignated_inputs;
		std::map<std::string, int> mDesignated_outputs;
		inference_engine_layer_property mInputProperty;   // Cached input layer property.
		inference_engine_layer_property mOutputProperty;  // Cached output layer property.
	};

} /* MLAPIImpl */
} /* InferenceEngineImpl */
99
100 #endif /* __INFERENCE_ENGINE_NNSTREAMER_PRIVATE_H__ */