From f3a6ae5f00428943ba7cbdfe5abab3cb90e1c237 Mon Sep 17 00:00:00 2001
From: Dmitry Kurtaev
Date: Tue, 5 Jun 2018 17:18:14 +0300
Subject: [PATCH] Wrap Inference Engine init to try-catch

---
 modules/dnn/perf/perf_net.cpp      |  2 +-
 modules/dnn/src/dnn.cpp            |  8 +++-
 modules/dnn/src/op_inf_engine.cpp  | 75 +++++++++++++++++++++++++-------------
 modules/dnn/test/test_backends.cpp |  2 +-
 4 files changed, 59 insertions(+), 28 deletions(-)

diff --git a/modules/dnn/perf/perf_net.cpp b/modules/dnn/perf/perf_net.cpp
index 8507a21..fff74df 100644
--- a/modules/dnn/perf/perf_net.cpp
+++ b/modules/dnn/perf/perf_net.cpp
@@ -34,7 +34,7 @@ public:
     void processNet(std::string weights, std::string proto, std::string halide_scheduler,
                     const Mat& input, const std::string& outputLayer = "")
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #if defined(HAVE_OPENCL)
             if (!cv::ocl::useOpenCL())
diff --git a/modules/dnn/src/dnn.cpp b/modules/dnn/src/dnn.cpp
index 819ad63..98d6fdc 100644
--- a/modules/dnn/src/dnn.cpp
+++ b/modules/dnn/src/dnn.cpp
@@ -2252,7 +2252,13 @@ void Net::setPreferableTarget(int targetId)
     if (IS_DNN_OPENCL_TARGET(targetId))
     {
 #ifndef HAVE_OPENCL
-        impl->preferableTarget = DNN_TARGET_CPU;
+#ifdef HAVE_INF_ENGINE
+        if (impl->preferableBackend == DNN_BACKEND_OPENCV)
+#else
+        if (impl->preferableBackend == DNN_BACKEND_DEFAULT ||
+            impl->preferableBackend == DNN_BACKEND_OPENCV)
+#endif // HAVE_INF_ENGINE
+            impl->preferableTarget = DNN_TARGET_CPU;
 #else
         bool fp16 = ocl::Device::getDefault().isExtensionSupported("cl_khr_fp16");
         if (!fp16 && targetId == DNN_TARGET_OPENCL_FP16)
diff --git a/modules/dnn/src/op_inf_engine.cpp b/modules/dnn/src/op_inf_engine.cpp
index b03a67d..9481fc3 100644
--- a/modules/dnn/src/op_inf_engine.cpp
+++ b/modules/dnn/src/op_inf_engine.cpp
@@ -361,35 +361,60 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
 {
     CV_Assert(!isInitialized());
 
-    static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
-    std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
-    auto pluginIt = sharedPlugins.find(deviceName);
-    if (pluginIt != sharedPlugins.end())
+    try
     {
-        enginePtr = pluginIt->second;
-    }
-    else
-    {
-        enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
-        sharedPlugins[deviceName] = enginePtr;
-    }
-    plugin = InferenceEngine::InferencePlugin(enginePtr);
+        static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
+        std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
+        auto pluginIt = sharedPlugins.find(deviceName);
+        if (pluginIt != sharedPlugins.end())
+        {
+            enginePtr = pluginIt->second;
+        }
+        else
+        {
+            enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
+            sharedPlugins[deviceName] = enginePtr;
 
-    if (targetDevice == InferenceEngine::TargetDevice::eCPU)
+            if (targetDevice == InferenceEngine::TargetDevice::eCPU)
+            {
+                std::string suffixes[] = {"_avx2", "_sse4", ""};
+                bool haveFeature[] = {
+                    checkHardwareSupport(CPU_AVX2),
+                    checkHardwareSupport(CPU_SSE4_2),
+                    true
+                };
+                for (int i = 0; i < 3; ++i)
+                {
+                    if (!haveFeature[i])
+                        continue;
+                #ifdef _WIN32
+                    std::string libName = "cpu_extension" + suffixes[i] + ".dll";
+                #else
+                    std::string libName = "libcpu_extension" + suffixes[i] + ".so";
+                #endif // _WIN32
+                    try
+                    {
+                        InferenceEngine::IExtensionPtr extension =
+                            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
+                        enginePtr->AddExtension(extension, 0);
+                        break;
+                    }
+                    catch(...) {}
+                }
+                // Some networks can work without a library of extra layers.
+            }
+        }
+        plugin = InferenceEngine::InferencePlugin(enginePtr);
+
+        netExec = plugin.LoadNetwork(net, {});
+        infRequest = netExec.CreateInferRequest();
+        infRequest.SetInput(inpBlobs);
+        infRequest.SetOutput(outBlobs);
+    }
+    catch (const std::exception& ex)
     {
-#ifdef _WIN32
-        InferenceEngine::IExtensionPtr extension =
-            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("cpu_extension.dll");
-#else
-        InferenceEngine::IExtensionPtr extension =
-            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("libcpu_extension.so");
-#endif // _WIN32
-        plugin.AddExtension(extension);
+        CV_Error(Error::StsAssert, format("Failed to initialize Inference Engine backend: %s", ex.what()));
     }
-    netExec = plugin.LoadNetwork(net, {});
-    infRequest = netExec.CreateInferRequest();
-    infRequest.SetInput(inpBlobs);
-    infRequest.SetOutput(outBlobs);
 }
 
 bool InfEngineBackendNet::isInitialized()
diff --git a/modules/dnn/test/test_backends.cpp b/modules/dnn/test/test_backends.cpp
index 88b8a17..6f41610 100644
--- a/modules/dnn/test/test_backends.cpp
+++ b/modules/dnn/test/test_backends.cpp
@@ -40,7 +40,7 @@ public:
                     std::string halideScheduler = "",
                     double l1 = 0.0, double lInf = 0.0)
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #ifdef HAVE_OPENCL
             if (!cv::ocl::useOpenCL())
-- 
2.7.4
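
Editor's note: with this patch, a failed Inference Engine plugin initialization is reported through CV_Error, which raises cv::Exception on the caller's side instead of letting a raw plugin-loader error escape. A minimal caller-side sketch of the resulting behavior, not part of the patch itself; the model file names and the 224x224 input size are placeholders:

    #include <opencv2/dnn.hpp>
    #include <iostream>

    int main()
    {
        // Placeholder model files; any Caffe model illustrates the point.
        cv::dnn::Net net = cv::dnn::readNetFromCaffe("deploy.prototxt", "weights.caffemodel");
        net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
        try
        {
            // The Inference Engine plugin is initialized lazily, on the first forward pass.
            cv::Mat blob = cv::dnn::blobFromImage(cv::Mat::zeros(224, 224, CV_8UC3));
            net.setInput(blob);
            net.forward();
        }
        catch (const cv::Exception& e)
        {
            // The message now includes "Failed to initialize Inference Engine backend: ..."
            std::cerr << e.what() << std::endl;
            return 1;
        }
        return 0;
    }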