// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "precomp.hpp"
#include "op_inf_engine.hpp"
#include <opencv2/dnn/shape_utils.hpp>

#ifdef HAVE_INF_ENGINE
#include <ie_extension.h>
#elif defined(ENABLE_PLUGINS)
// using plugin API
#include "backend.hpp"
#include "factory.hpp"
#endif

#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.hpp>

namespace cv { namespace dnn {

#ifdef HAVE_INF_ENGINE

CV__DNN_INLINE_NS_BEGIN

cv::String getInferenceEngineBackendType()
{
    return "NGRAPH";
}
cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
{
    if (newBackendType != "NGRAPH")
        CV_Error(Error::StsNotImplemented, cv::format("DNN/IE: only NGRAPH backend is supported: %s", newBackendType.c_str()));
    return newBackendType;
}

CV__DNN_INLINE_NS_END


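// Wrap an Inference Engine blob in a cv::Mat header without copying data: the returned Mat
// does not own the blob's memory, so the blob must outlive any Mat produced from it.
// Only FP32 and U8 blob precisions are supported here.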
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob)
{
    // NOTE: Inference Engine sizes are reversed.
    std::vector<size_t> dims = blob->getTensorDesc().getDims();
    std::vector<int> size(dims.begin(), dims.end());
    auto precision = blob->getTensorDesc().getPrecision();

    int type = -1;
    switch (precision)
    {
        case InferenceEngine::Precision::FP32: type = CV_32F; break;
        case InferenceEngine::Precision::U8: type = CV_8U; break;
        default:
            CV_Error(Error::StsNotImplemented, "Unsupported blob precision");
    }
    return Mat(size, type, (void*)blob->buffer());
}

void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
                          std::vector<Mat>& mats)
{
    mats.resize(blobs.size());
    for (size_t i = 0; i < blobs.size(); ++i)
        mats[i] = infEngineBlobToMat(blobs[i]);
}


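// The helpers below create and cache InferenceEngine::Core objects, one per plugin/device id,
// so plugin shared libraries are loaded once and reused across networks.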
static bool init_IE_plugins()
{
    // load and hold IE plugins
    static InferenceEngine::Core* init_core = new InferenceEngine::Core();  // 'delete' is never called
    (void)init_core->GetAvailableDevices();
    return true;
}
static InferenceEngine::Core& retrieveIECore(const std::string& id, std::map<std::string, std::shared_ptr<InferenceEngine::Core> >& cores)
{
    AutoLock lock(getInitializationMutex());
    std::map<std::string, std::shared_ptr<InferenceEngine::Core> >::iterator i = cores.find(id);
    if (i == cores.end())
    {
        std::shared_ptr<InferenceEngine::Core> core = std::make_shared<InferenceEngine::Core>();
        cores[id] = core;
        return *core.get();
    }
    return *(i->second).get();
}
static InferenceEngine::Core& create_IE_Core_instance(const std::string& id)
{
    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> > cores;
    return retrieveIECore(id, cores);
}
static InferenceEngine::Core& create_IE_Core_pointer(const std::string& id)
{
    // load and hold IE plugins
    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> >* cores =
            new std::map<std::string, std::shared_ptr<InferenceEngine::Core> >();
    return retrieveIECore(id, *cores);
}
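// Return the cached Core for the given plugin/device id, creating it on first use.
// When OPENCV_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND is enabled (default on Windows),
// the map of Core objects is intentionally leaked to avoid destruction-order issues at process exit.
// Illustrative usage from this file: getCore("MYRIAD") for Myriad devices, getCore("") for the default Core.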
101 InferenceEngine::Core& getCore(const std::string& id)
102 {
103     // to make happy memory leak tools use:
104     // - OPENCV_DNN_INFERENCE_ENGINE_HOLD_PLUGINS=0
105     // - OPENCV_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND=0
106     static bool param_DNN_INFERENCE_ENGINE_HOLD_PLUGINS = utils::getConfigurationParameterBool("OPENCV_DNN_INFERENCE_ENGINE_HOLD_PLUGINS", true);
107     static bool init_IE_plugins_ = param_DNN_INFERENCE_ENGINE_HOLD_PLUGINS && init_IE_plugins(); CV_UNUSED(init_IE_plugins_);
108
109     static bool param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND =
110             utils::getConfigurationParameterBool("OPENCV_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND",
111 #ifdef _WIN32
112                 true
113 #else
114                 false
115 #endif
116             );
117
118     InferenceEngine::Core& core = param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
119             ? create_IE_Core_pointer(id)
120             : create_IE_Core_instance(id);
121     return core;
122 }
123
124
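// Return true when the Inference Engine CPU plugin is backed by Arm Compute Library (NEON).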
static bool detectArmPlugin_()
{
    InferenceEngine::Core& ie = getCore("CPU");
    const std::vector<std::string> devices = ie.GetAvailableDevices();
    for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
    {
        if (i->find("CPU") != std::string::npos)
        {
            const std::string name = ie.GetMetric(*i, METRIC_KEY(FULL_DEVICE_NAME)).as<std::string>();
            CV_LOG_INFO(NULL, "CPU plugin: " << name);
            return name.find("arm_compute::NEON") != std::string::npos;
        }
    }
    return false;
}

#if !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
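// Return true when the given VPU device ("MYRIAD" or "HDDL") reports a Myriad X class chip.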
static bool detectMyriadX_(const std::string& device)
{
    AutoLock lock(getInitializationMutex());

    // Lightweight detection
    InferenceEngine::Core& ie = getCore(device);
    const std::vector<std::string> devices = ie.GetAvailableDevices();
    for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
    {
        if (i->find(device) != std::string::npos)
        {
            const std::string name = ie.GetMetric(*i, METRIC_KEY(FULL_DEVICE_NAME)).as<std::string>();
            CV_LOG_INFO(NULL, "Myriad device: " << name);
            return name.find("MyriadX") != std::string::npos || name.find("Myriad X") != std::string::npos || name.find("HDDL") != std::string::npos;
        }
    }
    return false;
}
#endif  // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)

#endif  // HAVE_INF_ENGINE


CV__DNN_INLINE_NS_BEGIN

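// Release a Myriad VPU device held by OpenCV by unregistering the related plugins from the Core.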
void resetMyriadDevice()
{
#ifdef HAVE_INF_ENGINE
    CV_LOG_INFO(NULL, "DNN: Unregistering both 'MYRIAD' and 'HETERO:MYRIAD,CPU' plugins");

    AutoLock lock(getInitializationMutex());

    InferenceEngine::Core& ie = getCore("MYRIAD");
    try
    {
        ie.UnregisterPlugin("MYRIAD");
        ie.UnregisterPlugin("HETERO");
    }
    catch (...) {}
#endif  // HAVE_INF_ENGINE
}

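// Release an HDDL device held by OpenCV by unregistering the related plugins from the Core.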
void releaseHDDLPlugin()
{
#ifdef HAVE_INF_ENGINE
    CV_LOG_INFO(NULL, "DNN: Unregistering both 'HDDL' and 'HETERO:HDDL,CPU' plugins");

    AutoLock lock(getInitializationMutex());

    InferenceEngine::Core& ie = getCore("HDDL");
    try
    {
        ie.UnregisterPlugin("HDDL");
        ie.UnregisterPlugin("HETERO");
    }
    catch (...) {}
#endif  // HAVE_INF_ENGINE
}

#ifdef HAVE_INF_ENGINE
bool isMyriadX()
{
    static bool myriadX = getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;
    return myriadX;
}

bool isArmComputePlugin()
{
    static bool armPlugin = getInferenceEngineCPUType() == CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
    return armPlugin;
}

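// Resolve the VPU type once: use OPENCV_DNN_IE_VPU_TYPE if set, a compile-time default if
// provided, otherwise autodetect Myriad 2 vs Myriad X / HDDL through the Inference Engine.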
static std::string getInferenceEngineVPUType_()
{
    static std::string param_vpu_type = utils::getConfigurationParameterString("OPENCV_DNN_IE_VPU_TYPE", "");
    if (param_vpu_type == "")
    {
#if defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
        param_vpu_type = OPENCV_DNN_IE_VPU_TYPE_DEFAULT;
#else
        CV_LOG_INFO(NULL, "OpenCV-DNN: running Inference Engine VPU autodetection: Myriad2/X or HDDL. In case of other accelerator types specify 'OPENCV_DNN_IE_VPU_TYPE' parameter");
        try {
            bool isMyriadX_ = detectMyriadX_("MYRIAD");
            bool isHDDL_ = detectMyriadX_("HDDL");
            if (isMyriadX_ || isHDDL_)
            {
                param_vpu_type = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;
            }
            else
            {
                param_vpu_type = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2;
            }
        }
        catch (...)
        {
            CV_LOG_WARNING(NULL, "OpenCV-DNN: Failed Inference Engine VPU autodetection. Specify 'OPENCV_DNN_IE_VPU_TYPE' parameter.");
            param_vpu_type.clear();
        }
#endif
    }
    CV_LOG_INFO(NULL, "OpenCV-DNN: Inference Engine VPU type='" << param_vpu_type << "'");
    return param_vpu_type;
}

cv::String getInferenceEngineVPUType()
{
    static cv::String vpu_type = getInferenceEngineVPUType_();
    return vpu_type;
}

cv::String getInferenceEngineCPUType()
{
    static cv::String cpu_type = detectArmPlugin_() ?
                                 CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE :
                                 CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
    return cpu_type;
}


namespace openvino {

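// Report whether the requested DNN target corresponds to a device currently exposed by the
// Inference Engine (MYRIAD, HDDL, FPGA, CPU, or GPU for the OpenCL targets).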
bool checkTarget(Target target)
{
    // Lightweight detection
    const std::vector<std::string> devices = getCore("").GetAvailableDevices();
    for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
    {
        if (std::string::npos != i->find("MYRIAD") && target == DNN_TARGET_MYRIAD)
            return true;
        if (std::string::npos != i->find("HDDL") && target == DNN_TARGET_HDDL)
            return true;
        if (std::string::npos != i->find("FPGA") && target == DNN_TARGET_FPGA)
            return true;
        if (std::string::npos != i->find("CPU") && target == DNN_TARGET_CPU)
            return true;
        if (std::string::npos != i->find("GPU") && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
            return true;
    }
    return false;
}

}  // namespace openvino

#else  // HAVE_INF_ENGINE


namespace openvino {

bool checkTarget(Target target)
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        return networkBackend.checkTarget(target);
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: checkTarget failed: " << e.what())
    }
#endif
    return false;
}

}  // namespace openvino


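// Without built-in Inference Engine support, the entry points below defer to the dynamically
// loaded "openvino" plugin when ENABLE_PLUGINS is set; otherwise they raise StsNotImplemented.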
cv::String getInferenceEngineBackendType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
        return CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
        CV_Assert(newBackendType == CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
        return newBackendType;  // the plugin backend accepts only the NGRAPH backend type
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_UNUSED(newBackendType);
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String getInferenceEngineVPUType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        if (networkBackend.checkTarget(DNN_TARGET_MYRIAD))
            return CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;  // 2021.4 supports NCS2 only
        CV_Error(Error::StsError, "DNN/OpenVINO: DNN_TARGET_MYRIAD is not available");
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}

cv::String getInferenceEngineCPUType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
#if defined(__arm__) || defined(__aarch64__) || defined(_M_ARM64)
        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
#else
        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
#endif
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what())
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}

#endif  // HAVE_INF_ENGINE


CV__DNN_INLINE_NS_END
}}  // namespace dnn, namespace cv