// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "test_precomp.hpp"
#include "opencv2/core/ocl.hpp"

namespace opencv_test { namespace {

class DNNTestNetwork : public DNNTestLayer
{
public:
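    // Runs the network on a randomly filled NCHW blob of the given spatial size and
    // compares the tested backend/target against the reference DNN_BACKEND_OPENCV run.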
    void processNet(const std::string& weights, const std::string& proto,
                    Size inpSize, const std::string& outputLayer = "",
                    const std::string& halideScheduler = "",
                    double l1 = 0.0, double lInf = 0.0)
    {
        // Create a common input blob.
        int blobSize[] = {1, 3, inpSize.height, inpSize.width};
        Mat inp(4, blobSize, CV_32FC1);
        randu(inp, 0.0f, 1.0f);

        processNet(weights, proto, inp, outputLayer, halideScheduler, l1, lInf);
    }

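    // Same comparison with a caller-provided input blob; l1/lInf values of 0.0 fall
    // back to the per-target defaults (default_l1 / default_lInf).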
    void processNet(std::string weights, std::string proto,
                    Mat inp, const std::string& outputLayer = "",
                    std::string halideScheduler = "",
                    double l1 = 0.0, double lInf = 0.0, double detectionConfThresh = 0.2)
    {
        checkBackend();
        l1 = l1 ? l1 : default_l1;
        lInf = lInf ? lInf : default_lInf;

        weights = findDataFile(weights, false);
        if (!proto.empty())
            proto = findDataFile(proto);

        // Create two networks - with default backend and target and a tested one.
        Net netDefault = readNet(weights, proto);
        netDefault.setPreferableBackend(DNN_BACKEND_OPENCV);
        netDefault.setInput(inp);
        Mat outDefault = netDefault.forward(outputLayer).clone();

        net = readNet(weights, proto);
        net.setInput(inp);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        if (backend == DNN_BACKEND_HALIDE && !halideScheduler.empty())
        {
            halideScheduler = findDataFile(halideScheduler);
            net.setHalideScheduler(halideScheduler);
        }
        Mat out = net.forward(outputLayer).clone();

        check(outDefault, out, outputLayer, l1, lInf, detectionConfThresh, "First run");

        // Test 2: change input.
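        // Flip every HxW plane of the NCHW blob horizontally, in place, so the
        // second forward pass runs on different data than the first one.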
        float* inpData = (float*)inp.data;
        for (int i = 0; i < inp.size[0] * inp.size[1]; ++i)
        {
            Mat slice(inp.size[2], inp.size[3], CV_32F, inpData);
            cv::flip(slice, slice, 1);
            inpData += slice.total();
        }
        netDefault.setInput(inp);
        net.setInput(inp);
        outDefault = netDefault.forward(outputLayer).clone();
        out = net.forward(outputLayer).clone();
        check(outDefault, out, outputLayer, l1, lInf, detectionConfThresh, "Second run");
    }

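    // Compares the reference output with the tested backend's output; uses a
    // detection-aware comparison when the output layer is "detection_out".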
    void check(Mat& ref, Mat& out, const std::string& outputLayer, double l1, double lInf,
               double detectionConfThresh, const char* msg)
    {
        if (outputLayer == "detection_out")
        {
            if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
            {
                // Inference Engine produces detections terminated by a row which starts from -1.
                out = out.reshape(1, out.total() / 7);
                int numDetections = 0;
                while (numDetections < out.rows && out.at<float>(numDetections, 0) != -1)
                {
                    numDetections += 1;
                }
                out = out.rowRange(0, numDetections);
            }
            normAssertDetections(ref, out, msg, detectionConfThresh, l1, lInf);
        }
        else
            normAssert(ref, out, msg, l1, lInf);
    }

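    // Network built with the tested backend/target; kept as a member so tests can
    // run the expectNoFallbacks* checks on it after processNet() returns.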
    Net net;
};

TEST_P(DNNTestNetwork, AlexNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_1GB);
    processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_alexnet.yml" :
                                             "dnn/halide_scheduler_alexnet.yml");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, ResNet_50)
{
    applyTestTag(
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
        CV_TEST_TAG_DEBUG_LONG
    );
    processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
               Size(224, 224), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_resnet_50.yml" :
                                             "dnn/halide_scheduler_resnet_50.yml");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
{
    processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_squeezenet_v1_1.yml" :
                                             "dnn/halide_scheduler_squeezenet_v1_1.yml");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, GoogLeNet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
               Size(224, 224), "prob");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, Inception_5h)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    double l1 = default_l1, lInf = default_lInf;
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL))
    {
        l1 = 1.72e-5;
        lInf = 8e-4;
    }
    processNet("dnn/tensorflow_inception_graph.pb", "", Size(224, 224), "softmax2",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_inception_5h.yml" :
                                             "dnn/halide_scheduler_inception_5h.yml",
               l1, lInf);
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, ENet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    if (backend == DNN_BACKEND_CUDA && target == DNN_TARGET_CUDA_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA_FP16);
    processNet("dnn/Enet-model-best.net", "", Size(512, 512), "l367_Deconvolution",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_enet.yml" :
                                             "dnn/halide_scheduler_enet.yml",
               2e-5, 0.15);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false);
    float scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 1.5e-2 : 0.0;
    float iouDiff = (target == DNN_TARGET_MYRIAD) ? 0.063 : 0.0;
    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.252 : FLT_MIN;
    processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
               inp, "detection_out", "", scoreDiff, iouDiff, detectionConfThresh);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe_Different_Width_Height)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 560), Scalar(127.5, 127.5, 127.5), false);
    float scoreDiff = 0.0, iouDiff = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.029;
        iouDiff = 0.09;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.03;
        iouDiff = 0.08;
    }
    processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
               inp, "detection_out", "", scoreDiff, iouDiff);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v1_TensorFlow)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.216 : 0.2;
    float scoreDiff = 0.0, iouDiff = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.095;
        iouDiff = 0.09;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.007;
        iouDiff = 0.08;
    }
    processNet("dnn/ssd_mobilenet_v1_coco_2017_11_17.pb", "dnn/ssd_mobilenet_v1_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", scoreDiff, iouDiff, detectionConfThresh);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v1_TensorFlow_Different_Width_Height)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 560), Scalar(), false);
    float scoreDiff = 0.0, iouDiff = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.012;
        iouDiff = 0.06;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.007;
        iouDiff = 0.06;
    }
    processNet("dnn/ssd_mobilenet_v1_coco_2017_11_17.pb", "dnn/ssd_mobilenet_v1_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", scoreDiff, iouDiff);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v2_TensorFlow)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float scoreDiff = 2e-5, iouDiff = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.013;
        iouDiff = 0.062;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.02;
        iouDiff = 0.07;
    }
    processNet("dnn/ssd_mobilenet_v2_coco_2018_03_29.pb", "dnn/ssd_mobilenet_v2_coco_2018_03_29.pbtxt",
               inp, "detection_out", "", scoreDiff, iouDiff, 0.25);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, SSD_VGG16)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_VERYLONG);
    if (backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);  // TODO HALIDE_CPU
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float scoreDiff = 0.0, iouDiff = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16)
    {
        scoreDiff = 0.0325;
    }
    else if (target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.0325;
        iouDiff = 0.032;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.03;
    }

    processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel",
               "dnn/ssd_vgg16.prototxt", inp, "detection_out", "", scoreDiff, iouDiff);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_coco)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_LONG);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0056 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.072 : 0.0;
    processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
               Size(46, 46), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_VERYLONG);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    // output range: [-0.001, 0.97]
    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.012 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD || target == DNN_TARGET_OPENCL_FP16) ? 0.16 : 0.0;
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
               Size(46, 46), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
{
    applyTestTag(CV_TEST_TAG_LONG, CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif

    // The same .caffemodel but modified .prototxt
    // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt",
               Size(46, 46));
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, OpenFace)
{
#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#endif
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0024 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.0071 : 0.0;
    processNet("dnn/openface_nn4.small2.v1.t7", "", Size(96, 96), "", "", l1, lInf);

    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, opencv_face_detector)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat img = imread(findDataFile("gpu/lbpcascade/er.png"));
    Mat inp = blobFromImage(img, 1.0, Size(), Scalar(104.0, 177.0, 123.0), false, false);
    processNet("dnn/opencv_face_detector.caffemodel", "dnn/opencv_face_detector.prototxt",
               inp, "detection_out");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, Inception_v2_SSD_TensorFlow)
{
    applyTestTag(
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
        CV_TEST_TAG_DEBUG_LONG
    );
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float scoreDiff = 0.0, iouDiff = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        scoreDiff = 0.015;
        iouDiff = 0.0731;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        scoreDiff = 0.015;
        iouDiff = 0.08;
    }
    processNet("dnn/ssd_inception_v2_coco_2017_11_17.pb", "dnn/ssd_inception_v2_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", scoreDiff, iouDiff);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, DenseNet_121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    // Reference output values are in range [-3.807, 4.605]
    float l1 = 0.0, lInf = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 2e-2;
        lInf = 9e-2;
    }
    else if (target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.1;
        lInf = 0.6;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        l1 = 0.008;
        lInf = 0.05;
    }
    processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", Size(224, 224), "", "", l1, lInf);
    if (target != DNN_TARGET_MYRIAD || getInferenceEngineVPUType() != CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, FastNeuralStyle_eccv16)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB, CV_TEST_TAG_DEBUG_VERYLONG);

    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);

#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
#endif
#endif

    Mat img = imread(findDataFile("dnn/googlenet_1.png"));
    Mat inp = blobFromImage(img, 1.0, Size(320, 240), Scalar(103.939, 116.779, 123.68), false, false);
    // Output image has values in range [-143.526, 148.539].
    float l1 = 4e-5, lInf = 2e-3;
    if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.4;
        lInf = 7.45;
    }
    else if (target == DNN_TARGET_CUDA_FP16)
    {
        l1 = 0.3;
        lInf = 7.2;
    }
    processNet("dnn/fast_neural_style_eccv16_starry_night.t7", "", inp, "", "", l1, lInf);
#if defined(HAVE_INF_ENGINE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    expectNoFallbacksFromIE(net);
#endif
    expectNoFallbacksFromCUDA(net);
}

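// Instantiate every test above for the backend/target pairs produced by
// dnnBackendsAndTargets(); the boolean flags presumably select which optional
// backends (Halide, CUDA, ...) are included; see its declaration in test_common.hpp.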
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, dnnBackendsAndTargets(true, true, false, true, true));

}} // namespace