// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "precomp.hpp"
#include "op_inf_engine.hpp"
#include <opencv2/dnn/shape_utils.hpp>
#ifdef HAVE_INF_ENGINE
#include <ie_extension.h>
#elif defined(ENABLE_PLUGINS)
#include "backend.hpp"
#include "factory.hpp"
#endif

#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.hpp>
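// Build-mode note: this translation unit compiles in one of two configurations.
// With HAVE_INF_ENGINE, OpenCV links directly against Inference Engine (OpenVINO)
// and the helpers below drive InferenceEngine::Core themselves. With ENABLE_PLUGINS
// (and no HAVE_INF_ENGINE), OpenVINO support is loaded at runtime through the DNN
// plugin API, and the same public entry points forward to that plugin.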
namespace cv { namespace dnn {

#ifdef HAVE_INF_ENGINE

CV__DNN_INLINE_NS_BEGIN
cv::String getInferenceEngineBackendType()
{
    return "NGRAPH";
}

cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
{
    if (newBackendType != "NGRAPH")
        CV_Error(Error::StsNotImplemented, cv::format("DNN/IE: only the NGRAPH backend is supported: %s", newBackendType.c_str()));
    return newBackendType;
}

CV__DNN_INLINE_NS_END
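// Usage sketch for the conversion helpers below (illustrative only; 'req' and the
// "output" blob name are hypothetical): infEngineBlobToMat() returns a Mat header
// over the blob's own memory, so the Mat must not outlive the blob unless cloned.
//
//   InferenceEngine::Blob::Ptr blob = req.GetBlob("output");
//   Mat view = infEngineBlobToMat(blob);  // zero-copy view into the blob
//   Mat owned = view.clone();             // deep copy that survives the blob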
Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob)
{
    // NOTE: Inference Engine sizes are reversed.
    std::vector<size_t> dims = blob->getTensorDesc().getDims();
    std::vector<int> size(dims.begin(), dims.end());
    auto precision = blob->getTensorDesc().getPrecision();

    int type = -1;
    switch (precision)
    {
        case InferenceEngine::Precision::FP32: type = CV_32F; break;
        case InferenceEngine::Precision::U8: type = CV_8U; break;
        default:
            CV_Error(Error::StsNotImplemented, "Unsupported blob precision");
    }
    return Mat(size, type, (void*)blob->buffer());
}
void infEngineBlobsToMats(const std::vector<InferenceEngine::Blob::Ptr>& blobs,
                          std::vector<Mat>& mats)
{
    mats.resize(blobs.size());
    for (size_t i = 0; i < blobs.size(); ++i)
        mats[i] = infEngineBlobToMat(blobs[i]);
}
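// The helpers below manage InferenceEngine::Core lifetime. Cores are cached per
// device id, and two deliberate "leaks" (init_IE_plugins() and
// create_IE_Core_pointer()) keep plugins loaded until process exit, working around
// unload-order problems at shutdown (see getCore() for the controlling parameters).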
static bool init_IE_plugins()
{
    // load and hold IE plugins
    static InferenceEngine::Core* init_core = new InferenceEngine::Core();  // 'delete' is never called
    (void)init_core->GetAvailableDevices();
    return true;
}
static InferenceEngine::Core& retrieveIECore(const std::string& id, std::map<std::string, std::shared_ptr<InferenceEngine::Core> >& cores)
{
    AutoLock lock(getInitializationMutex());
    std::map<std::string, std::shared_ptr<InferenceEngine::Core> >::iterator i = cores.find(id);
    if (i == cores.end())
    {
        std::shared_ptr<InferenceEngine::Core> core = std::make_shared<InferenceEngine::Core>();
        cores[id] = core;
        return *core.get();
    }
    return *(i->second).get();
}
static InferenceEngine::Core& create_IE_Core_instance(const std::string& id)
{
    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> > cores;
    return retrieveIECore(id, cores);
}
static InferenceEngine::Core& create_IE_Core_pointer(const std::string& id)
{
    // load and hold IE plugins
    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> >* cores =
            new std::map<std::string, std::shared_ptr<InferenceEngine::Core> >();  // 'delete' is never called
    return retrieveIECore(id, *cores);
}
InferenceEngine::Core& getCore(const std::string& id)
{
    // To keep memory-leak tools happy, use:
    // - OPENCV_DNN_INFERENCE_ENGINE_HOLD_PLUGINS=0
    // - OPENCV_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND=0
    static bool param_DNN_INFERENCE_ENGINE_HOLD_PLUGINS = utils::getConfigurationParameterBool("OPENCV_DNN_INFERENCE_ENGINE_HOLD_PLUGINS", true);
    static bool init_IE_plugins_ = param_DNN_INFERENCE_ENGINE_HOLD_PLUGINS && init_IE_plugins(); CV_UNUSED(init_IE_plugins_);

    static bool param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND =
            utils::getConfigurationParameterBool("OPENCV_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND",
#ifdef _WIN32
                true
#else
                false
#endif
            );

    InferenceEngine::Core& core = param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
            ? create_IE_Core_pointer(id)
            : create_IE_Core_instance(id);
    return core;
}
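// Behavior sketch (illustrative): getCore() caches one Core per id, so repeated
// calls with the same id share plugin state, while different ids get distinct Cores:
//
//   InferenceEngine::Core& a = getCore("CPU");
//   InferenceEngine::Core& b = getCore("CPU");  // same instance as 'a'
//   InferenceEngine::Core& c = getCore("GPU");  // different, separately cached Core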
static bool detectArmPlugin_()
{
    InferenceEngine::Core& ie = getCore("CPU");
    const std::vector<std::string> devices = ie.GetAvailableDevices();
    for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
    {
        if (i->find("CPU") != std::string::npos)
        {
            const std::string name = ie.GetMetric(*i, METRIC_KEY(FULL_DEVICE_NAME)).as<std::string>();
            CV_LOG_INFO(NULL, "CPU plugin: " << name);
            return name.find("arm_compute::NEON") != std::string::npos;
        }
    }
    return false;
}
#if !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
static bool detectMyriadX_(const std::string& device)
{
    AutoLock lock(getInitializationMutex());

    // Lightweight detection
    InferenceEngine::Core& ie = getCore(device);
    const std::vector<std::string> devices = ie.GetAvailableDevices();
    for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
    {
        if (i->find(device) != std::string::npos)
        {
            const std::string name = ie.GetMetric(*i, METRIC_KEY(FULL_DEVICE_NAME)).as<std::string>();
            CV_LOG_INFO(NULL, "Myriad device: " << name);
            return name.find("MyriadX") != std::string::npos || name.find("Myriad X") != std::string::npos || name.find("HDDL") != std::string::npos;
        }
    }
    return false;
}
#endif  // !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)

#endif  // HAVE_INF_ENGINE

CV__DNN_INLINE_NS_BEGIN
void resetMyriadDevice()
{
#ifdef HAVE_INF_ENGINE
    CV_LOG_INFO(NULL, "DNN: Unregistering both 'MYRIAD' and 'HETERO:MYRIAD,CPU' plugins");

    AutoLock lock(getInitializationMutex());

    InferenceEngine::Core& ie = getCore("MYRIAD");
    try
    {
        ie.UnregisterPlugin("MYRIAD");
        ie.UnregisterPlugin("HETERO");
    }
    catch (...) {}
#endif  // HAVE_INF_ENGINE
}
void releaseHDDLPlugin()
{
#ifdef HAVE_INF_ENGINE
    CV_LOG_INFO(NULL, "DNN: Unregistering both 'HDDL' and 'HETERO:HDDL,CPU' plugins");

    AutoLock lock(getInitializationMutex());

    InferenceEngine::Core& ie = getCore("HDDL");
    try
    {
        ie.UnregisterPlugin("HDDL");
        ie.UnregisterPlugin("HETERO");
    }
    catch (...) {}
#endif  // HAVE_INF_ENGINE
}
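// Intent note (a sketch, not a guarantee): unregistering the plugins drops the
// cached Core's hold on the device, so a later getCore("MYRIAD") / getCore("HDDL")
// access re-creates the plugin, e.g. when recovering a stuck VPU between runs.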
#ifdef HAVE_INF_ENGINE
bool isMyriadX()
{
    static bool myriadX = getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;
    return myriadX;
}

bool isArmComputePlugin()
{
    static bool armPlugin = getInferenceEngineCPUType() == CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
    return armPlugin;
}
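// VPU type resolution order, per the logic below: the OPENCV_DNN_IE_VPU_TYPE
// runtime parameter wins; otherwise the compile-time OPENCV_DNN_IE_VPU_TYPE_DEFAULT
// applies if defined; otherwise the device name is probed via detectMyriadX_() and
// mapped to Myriad X (also used for HDDL) or Myriad 2.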
static std::string getInferenceEngineVPUType_()
{
    static std::string param_vpu_type = utils::getConfigurationParameterString("OPENCV_DNN_IE_VPU_TYPE", "");
    if (param_vpu_type == "")
    {
#if defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
        param_vpu_type = OPENCV_DNN_IE_VPU_TYPE_DEFAULT;
#else
        CV_LOG_INFO(NULL, "OpenCV-DNN: running Inference Engine VPU autodetection: Myriad2/X or HDDL. For other accelerator types, specify the 'OPENCV_DNN_IE_VPU_TYPE' parameter");
        try {
            bool isMyriadX_ = detectMyriadX_("MYRIAD");
            bool isHDDL_ = detectMyriadX_("HDDL");
            if (isMyriadX_ || isHDDL_)
                param_vpu_type = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;
            else
                param_vpu_type = CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2;
        }
        catch (...)
        {
            CV_LOG_WARNING(NULL, "OpenCV-DNN: Inference Engine VPU autodetection failed. Specify the 'OPENCV_DNN_IE_VPU_TYPE' parameter.");
            param_vpu_type.clear();
        }
#endif
    }
    CV_LOG_INFO(NULL, "OpenCV-DNN: Inference Engine VPU type='" << param_vpu_type << "'");
    return param_vpu_type;
}
cv::String getInferenceEngineVPUType()
{
    static cv::String vpu_type = getInferenceEngineVPUType_();
    return vpu_type;
}

cv::String getInferenceEngineCPUType()
{
    static cv::String cpu_type = detectArmPlugin_() ?
            CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE :
            CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
    return cpu_type;
}

namespace openvino {
bool checkTarget(Target target)
{
    // Lightweight detection
    const std::vector<std::string> devices = getCore("").GetAvailableDevices();
    for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
    {
        if (std::string::npos != i->find("MYRIAD") && target == DNN_TARGET_MYRIAD)
            return true;
        else if (std::string::npos != i->find("HDDL") && target == DNN_TARGET_HDDL)
            return true;
        else if (std::string::npos != i->find("FPGA") && target == DNN_TARGET_FPGA)
            return true;
        else if (std::string::npos != i->find("CPU") && target == DNN_TARGET_CPU)
            return true;
        else if (std::string::npos != i->find("GPU") && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
            return true;
    }
    return false;
}
}  // namespace openvino
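// Quick availability check (illustrative):
//
//   if (openvino::checkTarget(DNN_TARGET_OPENCL))
//   {
//       // an OpenVINO "GPU" device was enumerated, so DNN_TARGET_OPENCL is usable
//   }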
#else  // HAVE_INF_ENGINE

namespace openvino {

bool checkTarget(Target target)
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        return networkBackend.checkTarget(target);
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: checkTarget failed: " << e.what());
    }
#endif
    return false;
}
}  // namespace openvino
cv::String getInferenceEngineBackendType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
        return CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH;
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what());
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String setInferenceEngineBackendType(const cv::String& newBackendType)
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
        CV_Assert(newBackendType == CV_DNN_BACKEND_INFERENCE_ENGINE_NGRAPH);
        return newBackendType;
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what());
    }
#endif
    CV_UNUSED(newBackendType);
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String getInferenceEngineVPUType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        if (networkBackend.checkTarget(DNN_TARGET_MYRIAD))
            return CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X;  // 2021.4 supports NCS2 only
        CV_Error(Error::StsError, "DNN/OpenVINO: DNN_TARGET_MYRIAD is not available");
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what());
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
cv::String getInferenceEngineCPUType()
{
#if defined(ENABLE_PLUGINS)
    try
    {
        auto& networkBackend = dnn_backend::createPluginDNNNetworkBackend("openvino");
        CV_UNUSED(networkBackend);
#if defined(__arm__) || defined(__aarch64__) || defined(_M_ARM64)
        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_ARM_COMPUTE;
#else
        return CV_DNN_INFERENCE_ENGINE_CPU_TYPE_X86;
#endif
    }
    catch (const std::exception& e)
    {
        CV_LOG_INFO(NULL, "DNN/OpenVINO: plugin is not available: " << e.what());
    }
#endif
    CV_Error(Error::StsNotImplemented, "This OpenCV build doesn't include InferenceEngine support");
}
#endif  // HAVE_INF_ENGINE

CV__DNN_INLINE_NS_END
}}  // namespace dnn, namespace cv