/**
 * Copyright (c) 2019 Samsung Electronics Co., Ltd All Rights Reserved
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef __INFERENCE_ENGINE_IMPL_OPENCV_H__
#define __INFERENCE_ENGINE_IMPL_OPENCV_H__

#include <string>
#include <vector>

#include <inference_engine_common.h>

#include <dlog.h>

#include <opencv2/dnn.hpp>
#include <opencv2/imgproc.hpp>
/**
 * @file inference_engine_opencv_private.h
 * @brief This file contains the InferenceOpenCV class, which
 *        provides OpenCV DNN based inference functionality.
 */

#ifdef LOG_TAG
#undef LOG_TAG
#endif

#define LOG_TAG "INFERENCE_ENGINE_OPENCV"

using namespace InferenceEngineInterface::Common;

namespace InferenceEngineImpl
{
namespace OpenCVImpl
{
	class InferenceOpenCV : public IInferenceEngineCommon
	{
	public:
		InferenceOpenCV();
		~InferenceOpenCV();

		/** Passes backend-specific private data to the engine. */
		int SetPrivateData(void *data) override;

		/** Selects which device types inference should target. */
		int SetTargetDevices(int types) override;

		/** Applies the given CLTuner configuration. */
		int SetCLTuner(const inference_engine_cltuner *cltuner) final;

		/** Loads the model files in @a model_paths according to @a model_format. */
		int Load(std::vector<std::string> model_paths,
				 inference_model_format_e model_format) override;

		/** Fills @a buffers with the input tensor buffers of the loaded model. */
		int GetInputTensorBuffers(
				std::vector<inference_engine_tensor_buffer> &buffers) override;

		/** Fills @a buffers with the output tensor buffers of the loaded model. */
		int GetOutputTensorBuffers(
				std::vector<inference_engine_tensor_buffer> &buffers) override;

		/** Gets the layer property of the input layers. */
		int GetInputLayerProperty(
				inference_engine_layer_property &property) override;

		/** Gets the layer property of the output layers. */
		int GetOutputLayerProperty(
				inference_engine_layer_property &property) override;

		/** Sets the layer property of the input layers. */
		int SetInputLayerProperty(
				inference_engine_layer_property &property) override;

		/** Sets the layer property of the output layers. */
		int SetOutputLayerProperty(
				inference_engine_layer_property &property) override;

		/** Reports the capacity of this backend. */
		int GetBackendCapacity(inference_engine_capacity *capacity) override;

		/** Runs inference with @a input_buffers and stores the results in @a output_buffers. */
		int Run(std::vector<inference_engine_tensor_buffer> &input_buffers,
				std::vector<inference_engine_tensor_buffer> &output_buffers)
				override;

	private:
		std::vector<cv::Mat> mInputData;
		cv::Mat mInputBlobs;

		std::vector<inference_engine_tensor_info> mInputTensorInfo;
		std::vector<inference_engine_tensor_info> mOutputTensorInfo;
		std::vector<cv::Mat> mOutputBlobs;
		cv::dnn::Net mNet; /**< Network associated with the loaded network model */

		std::vector<std::string> mInputLayers;
		std::vector<std::string> mOutputLayers;

		std::string mConfigFile;
		std::string mWeightFile;
	};
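
	/*
	 * A minimal usage sketch for InferenceOpenCV, assuming the usual
	 * inference-engine-interface flow with 0 as the success code; the
	 * model format value (INFERENCE_MODEL_CAFFE), the file names, and
	 * the buffer handling shown here are illustrative assumptions, not
	 * part of this header.
	 *
	 * @code
	 * InferenceOpenCV engine;
	 *
	 * std::vector<std::string> models = { "model.prototxt", "model.caffemodel" };
	 * if (engine.Load(models, INFERENCE_MODEL_CAFFE) != 0)
	 *     return -1;
	 *
	 * std::vector<inference_engine_tensor_buffer> inputs, outputs;
	 * engine.GetInputTensorBuffers(inputs);
	 * engine.GetOutputTensorBuffers(outputs);
	 *
	 * // ... fill the input buffers with preprocessed image data ...
	 *
	 * if (engine.Run(inputs, outputs) == 0) {
	 *     // the output buffers now hold the inference results.
	 * }
	 * @endcode
	 */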

} /* OpenCVImpl */
} /* InferenceEngineImpl */

#endif /* __INFERENCE_ENGINE_IMPL_OPENCV_H__ */