From 84ce2cc21179d9dbe633cec2da16598062787a59 Mon Sep 17 00:00:00 2001
From: Dmitry Kurtaev
Date: Mon, 26 Nov 2018 12:09:50 +0300
Subject: [PATCH] Enable some dnn tests according to the new Intel's Inference Engine release (R4)

---
 modules/dnn/src/layers/elementwise_layers.cpp | 22 +++++++++++++-------
 modules/dnn/test/test_backends.cpp            | 29 +++++++++++++++++----------
 modules/dnn/test/test_darknet_importer.cpp    |  4 +++-
 modules/dnn/test/test_halide_layers.cpp       |  4 +++-
 modules/dnn/test/test_layers.cpp              | 15 +++++++-------
 5 files changed, 46 insertions(+), 28 deletions(-)

diff --git a/modules/dnn/src/layers/elementwise_layers.cpp b/modules/dnn/src/layers/elementwise_layers.cpp
index 16874ff..dea7c6c 100644
--- a/modules/dnn/src/layers/elementwise_layers.cpp
+++ b/modules/dnn/src/layers/elementwise_layers.cpp
@@ -700,7 +700,8 @@ struct AbsValFunctor
 
     bool supportBackend(int backendId, int)
     {
-        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
     }
 
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
@@ -754,8 +755,11 @@ struct AbsValFunctor
 #ifdef HAVE_INF_ENGINE
     InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
     {
-        CV_Error(Error::StsNotImplemented, "Abs");
-        return InferenceEngine::CNNLayerPtr();
+        lp.type = "ReLU";
+        std::shared_ptr<InferenceEngine::ReLULayer> ieLayer(new InferenceEngine::ReLULayer(lp));
+        ieLayer->negative_slope = -1;
+        ieLayer->params["negative_slope"] = "-1.0";
+        return ieLayer;
     }
 #endif // HAVE_INF_ENGINE
 
@@ -832,7 +836,7 @@ struct PowerFunctor
     bool supportBackend(int backendId, int targetId)
     {
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
-            return (targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16) || power == 1.0;
+            return (targetId != DNN_TARGET_OPENCL && targetId != DNN_TARGET_OPENCL_FP16) || power == 1.0 || power == 0.5;
         else
             return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
     }
@@ -978,7 +982,8 @@ struct ChannelsPReLUFunctor
 
     bool supportBackend(int backendId, int)
     {
-        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE;
     }
 
     void apply(const float* srcptr, float* dstptr, int len, size_t planeSize, int cn0, int cn1) const
@@ -1064,8 +1069,11 @@ struct ChannelsPReLUFunctor
 #ifdef HAVE_INF_ENGINE
     InferenceEngine::CNNLayerPtr initInfEngine(InferenceEngine::LayerParams& lp)
     {
-        CV_Error(Error::StsNotImplemented, "PReLU");
-        return InferenceEngine::CNNLayerPtr();
+        lp.type = "PReLU";
+        std::shared_ptr<InferenceEngine::PReLULayer> ieLayer(new InferenceEngine::PReLULayer(lp));
+        const size_t numChannels = scale.total();
+        ieLayer->_weights = wrapToInfEngineBlob(scale, {numChannels}, InferenceEngine::Layout::C);
+        return ieLayer;
     }
 #endif // HAVE_INF_ENGINE
 
diff --git a/modules/dnn/test/test_backends.cpp b/modules/dnn/test/test_backends.cpp
index a18b25e..a1216a5 100644
--- a/modules/dnn/test/test_backends.cpp
+++ b/modules/dnn/test/test_backends.cpp
@@ -128,10 +128,16 @@ TEST_P(DNNTestNetwork, GoogLeNet)
 
 TEST_P(DNNTestNetwork, Inception_5h)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE) throw SkipTestException("");
+    double l1 = default_l1, lInf = default_lInf;
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL))
+    {
+        l1 = 1.72e-5;
+        lInf = 8e-4;
+    }
     processNet("dnn/tensorflow_inception_graph.pb", "", Size(224, 224), "softmax2",
                target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_inception_5h.yml" :
-                                             "dnn/halide_scheduler_inception_5h.yml");
+                                             "dnn/halide_scheduler_inception_5h.yml",
+               l1, lInf);
 }
 
 TEST_P(DNNTestNetwork, ENet)
@@ -193,8 +199,7 @@ TEST_P(DNNTestNetwork, SSD_VGG16)
 
 TEST_P(DNNTestNetwork, OpenPose_pose_coco)
 {
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt", Size(46, 46));
 }
@@ -202,8 +207,7 @@ TEST_P(DNNTestNetwork, OpenPose_pose_coco)
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 {
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt", Size(46, 46));
 }
@@ -211,8 +215,7 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
 
 TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 {
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     // The same .caffemodel but modified .prototxt
     // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
@@ -222,12 +225,16 @@ TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
 
 TEST_P(DNNTestNetwork, OpenFace)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018030000
+#if defined(INF_ENGINE_RELEASE)
+#if INF_ENGINE_RELEASE < 2018030000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is enabled starts from OpenVINO 2018R3");
+#elif INF_ENGINE_RELEASE < 2018040000
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
+        throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
 #endif
-    if (backend == DNN_BACKEND_HALIDE ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16))
+#endif
+    if (backend == DNN_BACKEND_HALIDE)
         throw SkipTestException("");
     processNet("dnn/openface_nn4.small2.v1.t7", "", Size(96, 96), "");
 }
diff --git a/modules/dnn/test/test_darknet_importer.cpp b/modules/dnn/test/test_darknet_importer.cpp
index 415e778..d7c14f2 100644
--- a/modules/dnn/test/test_darknet_importer.cpp
+++ b/modules/dnn/test/test_darknet_importer.cpp
@@ -347,8 +347,10 @@ INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, dnnBackendsAndTargets());
 
 TEST_P(Test_Darknet_layers, shortcut)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018040000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU)
-        throw SkipTestException("");
+        throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
+#endif
     testDarknetLayer("shortcut");
 }
 
diff --git a/modules/dnn/test/test_halide_layers.cpp b/modules/dnn/test/test_halide_layers.cpp
index 082cf62..ea5eafd 100644
--- a/modules/dnn/test/test_halide_layers.cpp
+++ b/modules/dnn/test/test_halide_layers.cpp
@@ -273,9 +273,11 @@ TEST_P(AvePooling, Accuracy)
     Size stride = get<3>(GetParam());
     Backend backendId = get<0>(get<4>(GetParam()));
     Target targetId = get<1>(get<4>(GetParam()));
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE < 2018040000
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD &&
         stride == Size(3, 2) && kernel == Size(3, 3) && outSize != Size(1, 1))
-        throw SkipTestException("");
+        throw SkipTestException("Test is enabled starts from OpenVINO 2018R4");
+#endif
 
     const int inWidth = (outSize.width - 1) * stride.width + kernel.width;
     const int inHeight = (outSize.height - 1) * stride.height + kernel.height;
diff --git a/modules/dnn/test/test_layers.cpp b/modules/dnn/test/test_layers.cpp
index 2b21485..1d41daa 100644
--- a/modules/dnn/test/test_layers.cpp
+++ b/modules/dnn/test/test_layers.cpp
@@ -243,9 +243,14 @@ TEST_P(Test_Caffe_layers, Concat)
 
 TEST_P(Test_Caffe_layers, Fused_Concat)
 {
-    if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU) ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL))
+#if defined(INF_ENGINE_RELEASE)
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16 ||
+            (INF_ENGINE_RELEASE < 2018040000 && target == DNN_TARGET_CPU))
         throw SkipTestException("");
+    }
+#endif
     checkBackend();
 
     // Test case
@@ -349,12 +354,6 @@ TEST_P(Test_Caffe_layers, Reshape_Split_Slice)
 
 TEST_P(Test_Caffe_layers, Conv_Elu)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-    {
-        if (!checkIETarget(DNN_TARGET_MYRIAD))
-            throw SkipTestException("Myriad is not available/disabled in OpenCV");
-    }
-
     Net net = readNetFromTensorflow(_tf("layer_elu_model.pb"));
     ASSERT_FALSE(net.empty());
 
-- 
2.7.4