Wrap Inference Engine initialization in try-catch
author     Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
Tue, 5 Jun 2018 14:18:14 +0000 (17:18 +0300)
committer  Dmitry Kurtaev <dmitry.kurtaev+github@gmail.com>
Thu, 7 Jun 2018 09:55:52 +0000 (12:55 +0300)
modules/dnn/perf/perf_net.cpp
modules/dnn/src/dnn.cpp
modules/dnn/src/op_inf_engine.cpp
modules/dnn/test/test_backends.cpp

modules/dnn/perf/perf_net.cpp
index 8507a21..fff74df 100644
@@ -34,7 +34,7 @@ public:
     void processNet(std::string weights, std::string proto, std::string halide_scheduler,
                     const Mat& input, const std::string& outputLayer = "")
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #if defined(HAVE_OPENCL)
             if (!cv::ocl::useOpenCL())
modules/dnn/src/dnn.cpp
index 819ad63..98d6fdc 100644
@@ -2252,7 +2252,13 @@ void Net::setPreferableTarget(int targetId)
         if (IS_DNN_OPENCL_TARGET(targetId))
         {
 #ifndef HAVE_OPENCL
-            impl->preferableTarget = DNN_TARGET_CPU;
+#ifdef HAVE_INF_ENGINE
+            if (impl->preferableBackend == DNN_BACKEND_OPENCV)
+#else
+            if (impl->preferableBackend == DNN_BACKEND_DEFAULT ||
+                impl->preferableBackend == DNN_BACKEND_OPENCV)
+#endif  // HAVE_INF_ENGINE
+                impl->preferableTarget = DNN_TARGET_CPU;
 #else
             bool fp16 = ocl::Device::getDefault().isExtensionSupported("cl_khr_fp16");
             if (!fp16 && targetId == DNN_TARGET_OPENCL_FP16)
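
The hunk above changes what happens on a build without HAVE_OPENCL: an OpenCL target is now forced back to CPU only for the OpenCV backend (and for the default backend when the Inference Engine is absent), since the Inference Engine backend presumably runs OpenCL targets through its own GPU plugin rather than through OpenCV's OpenCL module. A minimal sketch of the affected call sequence (the model file names are hypothetical):

    // Assumes OpenCV built without OpenCL but with the Inference Engine.
    cv::dnn::Net net = cv::dnn::readNet("model.xml", "model.bin");
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
    // Before this patch the target was silently reset to DNN_TARGET_CPU here;
    // now the requested OpenCL target is kept for the IE backend.
    net.setPreferableTarget(cv::dnn::DNN_TARGET_OPENCL_FP16);
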
modules/dnn/src/op_inf_engine.cpp
index b03a67d..9481fc3 100644
@@ -361,35 +361,60 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
 {
     CV_Assert(!isInitialized());
 
-    static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
-    std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
-    auto pluginIt = sharedPlugins.find(deviceName);
-    if (pluginIt != sharedPlugins.end())
+    try
     {
-        enginePtr = pluginIt->second;
-    }
-    else
-    {
-        enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
-        sharedPlugins[deviceName] = enginePtr;
-    }
-    plugin = InferenceEngine::InferencePlugin(enginePtr);
+        static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
+        std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
+        auto pluginIt = sharedPlugins.find(deviceName);
+        if (pluginIt != sharedPlugins.end())
+        {
+            enginePtr = pluginIt->second;
+        }
+        else
+        {
+            enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
+            sharedPlugins[deviceName] = enginePtr;
 
-    if (targetDevice == InferenceEngine::TargetDevice::eCPU)
+            if (targetDevice == InferenceEngine::TargetDevice::eCPU)
+            {
+                std::string suffixes[] = {"_avx2", "_sse4", ""};
+                bool haveFeature[] = {
+                    checkHardwareSupport(CPU_AVX2),
+                    checkHardwareSupport(CPU_SSE4_2),
+                    true
+                };
+                for (int i = 0; i < 3; ++i)
+                {
+                    if (!haveFeature[i])
+                        continue;
+    #ifdef _WIN32
+                    std::string libName = "cpu_extension" + suffixes[i] + ".dll";
+    #else
+                    std::string libName = "libcpu_extension" + suffixes[i] + ".so";
+    #endif  // _WIN32
+                    try
+                    {
+                        InferenceEngine::IExtensionPtr extension =
+                            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
+                        enginePtr->AddExtension(extension, 0);
+                        break;
+                    }
+                    catch(...) {}
+                }
+                // Some networks can work without a library of extra layers.
+            }
+        }
+        plugin = InferenceEngine::InferencePlugin(enginePtr);
+
+        netExec = plugin.LoadNetwork(net, {});
+        infRequest = netExec.CreateInferRequest();
+        infRequest.SetInput(inpBlobs);
+        infRequest.SetOutput(outBlobs);
+    }
+    catch (const std::exception& ex)
     {
-#ifdef _WIN32
-        InferenceEngine::IExtensionPtr extension =
-            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("cpu_extension.dll");
-#else
-        InferenceEngine::IExtensionPtr extension =
-            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("libcpu_extension.so");
-#endif  // _WIN32
-        plugin.AddExtension(extension);
+        CV_Error(Error::StsAssert, format("Failed to initialize Inference Engine backend: %s", ex.what()));
     }
-    netExec = plugin.LoadNetwork(net, {});
-    infRequest = netExec.CreateInferRequest();
-    infRequest.SetInput(inpBlobs);
-    infRequest.SetOutput(outBlobs);
 }
 
 bool InfEngineBackendNet::isInitialized()
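
With initialization wrapped in a try-catch, any failure (missing plugin libraries, unsupported layers, etc.) is re-raised through CV_Error and therefore reaches user code as a regular cv::Exception. A minimal, self-contained sketch of how a caller could handle that (the model file names are hypothetical):

    #include <opencv2/dnn.hpp>
    #include <iostream>

    int main()
    {
        cv::dnn::Net net = cv::dnn::readNet("model.xml", "model.bin");
        net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);

        // Dummy input for a hypothetical 224x224 network.
        cv::Mat image = cv::Mat::zeros(224, 224, CV_8UC3);
        net.setInput(cv::dnn::blobFromImage(image));
        try
        {
            // Backend initialization happens lazily before the first forward pass.
            cv::Mat out = net.forward();
        }
        catch (const cv::Exception& e)
        {
            // With this patch the failure surfaces here instead of as an
            // unhandled Inference Engine exception.
            std::cerr << "IE backend initialization failed: " << e.what() << std::endl;
        }
        return 0;
    }
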
modules/dnn/test/test_backends.cpp
index 88b8a17..6f41610 100644
@@ -40,7 +40,7 @@ public:
                     std::string halideScheduler = "",
                     double l1 = 0.0, double lInf = 0.0)
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #ifdef HAVE_OPENCL
             if (!cv::ocl::useOpenCL())