// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "test_precomp.hpp"
#include "opencv2/core/ocl.hpp"

namespace opencv_test { namespace {

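// DNNTestNetwork runs every network twice: once on the reference
// DNN_BACKEND_OPENCV backend and once on the backend/target pair under test,
// then compares the two outputs (element-wise, or detection-wise for
// SSD-style "detection_out" blobs).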
class DNNTestNetwork : public DNNTestLayer
{
public:
    void processNet(const std::string& weights, const std::string& proto,
                    Size inpSize, const std::string& outputLayer = "",
                    const std::string& halideScheduler = "",
                    double l1 = 0.0, double lInf = 0.0)
    {
        // Create a common input blob.
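        // (NCHW layout: 1 image, 3 channels, inpSize.height x inpSize.width,
        // filled with uniform noise in [0, 1) by randu below)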
        int blobSize[] = {1, 3, inpSize.height, inpSize.width};
        Mat inp(4, blobSize, CV_32FC1);
        randu(inp, 0.0f, 1.0f);

        processNet(weights, proto, inp, outputLayer, halideScheduler, l1, lInf);
    }

    void processNet(std::string weights, std::string proto,
                    Mat inp, const std::string& outputLayer = "",
                    std::string halideScheduler = "",
                    double l1 = 0.0, double lInf = 0.0, double detectionConfThresh = 0.2)
    {
        checkBackend();
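        // For plain blobs, l1 bounds the mean absolute difference and lInf the
        // maximum absolute difference (see normAssert); for detection outputs
        // they are forwarded as the score and box tolerances of
        // normAssertDetections. Zero means "use the per-target defaults
        // provided by DNNTestLayer".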
        l1 = l1 ? l1 : default_l1;
        lInf = lInf ? lInf : default_lInf;

        weights = findDataFile(weights, false);
        if (!proto.empty())
            proto = findDataFile(proto);

        // Create two networks: one with the default OpenCV backend/target
        // (the reference) and one with the tested backend/target.
        Net netDefault = readNet(weights, proto);
        netDefault.setPreferableBackend(DNN_BACKEND_OPENCV);
        netDefault.setInput(inp);
        Mat outDefault = netDefault.forward(outputLayer).clone();

        net = readNet(weights, proto);
        net.setInput(inp);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        if (backend == DNN_BACKEND_HALIDE && !halideScheduler.empty())
        {
            halideScheduler = findDataFile(halideScheduler);
            net.setHalideScheduler(halideScheduler);
        }
        Mat out = net.forward(outputLayer).clone();

        check(outDefault, out, outputLayer, l1, lInf, detectionConfThresh, "First run");

        // Test 2: change input.
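        // Flip each of the size[0] * size[1] (batch x channels) HxW planes
        // horizontally in place, so the second forward pass runs the
        // already-initialized networks on genuinely different data.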
        float* inpData = (float*)inp.data;
        for (int i = 0; i < inp.size[0] * inp.size[1]; ++i)
        {
            Mat slice(inp.size[2], inp.size[3], CV_32F, inpData);
            cv::flip(slice, slice, 1);
            inpData += slice.total();
        }
        netDefault.setInput(inp);
        net.setInput(inp);
        outDefault = netDefault.forward(outputLayer).clone();
        out = net.forward(outputLayer).clone();
        check(outDefault, out, outputLayer, l1, lInf, detectionConfThresh, "Second run");
    }

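    // "detection_out" blobs are Nx7, one row per detection:
    // [batchId, classId, confidence, left, top, right, bottom]. They are matched
    // with normAssertDetections, which effectively ignores detections scoring
    // below detectionConfThresh; anything else is compared element-wise.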
    void check(Mat& ref, Mat& out, const std::string& outputLayer, double l1, double lInf,
               double detectionConfThresh, const char* msg)
    {
        if (outputLayer == "detection_out")
        {
            if (backend == DNN_BACKEND_INFERENCE_ENGINE)
            {
                // Inference Engine produces detections terminated by a row that starts with -1.
                out = out.reshape(1, out.total() / 7);
                int numDetections = 0;
                while (numDetections < out.rows && out.at<float>(numDetections, 0) != -1)
                {
                    numDetections += 1;
                }
                out = out.rowRange(0, numDetections);
            }
            normAssertDetections(ref, out, msg, detectionConfThresh, l1, lInf);
        }
        else
            normAssert(ref, out, msg, l1, lInf);
    }

    Net net;
};

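// The optional .yml argument below selects a hand-written Halide schedule,
// with an OpenCL-specific variant when the target is DNN_TARGET_OPENCL.
// A new test typically follows the same pattern (the model paths here are
// purely hypothetical):
//
//   TEST_P(DNNTestNetwork, MyModel)
//   {
//       processNet("dnn/my_model.caffemodel", "dnn/my_model.prototxt",
//                  Size(224, 224), "prob");
//       expectNoFallbacksFromIE(net);
//   }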
TEST_P(DNNTestNetwork, AlexNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_1GB);
    processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_alexnet.yml" :
                                             "dnn/halide_scheduler_alexnet.yml");
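    // These helpers from the shared test fixture assert that no layer silently
    // fell back to the default OpenCV implementation when the IE or CUDA
    // backend was requested.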
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, ResNet_50)
{
    applyTestTag(
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
        CV_TEST_TAG_DEBUG_LONG
    );
    processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
               Size(224, 224), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_resnet_50.yml" :
                                             "dnn/halide_scheduler_resnet_50.yml");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
{
    processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_squeezenet_v1_1.yml" :
                                             "dnn/halide_scheduler_squeezenet_v1_1.yml");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, GoogLeNet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
               Size(224, 224), "prob");
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, Inception_5h)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    double l1 = default_l1, lInf = default_lInf;
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL))
    {
        l1 = 1.72e-5;
        lInf = 8e-4;
    }
    processNet("dnn/tensorflow_inception_graph.pb", "", Size(224, 224), "softmax2",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_inception_5h.yml" :
                                             "dnn/halide_scheduler_inception_5h.yml",
               l1, lInf);
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, ENet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    processNet("dnn/Enet-model-best.net", "", Size(512, 512), "l367_Deconvolution",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_enet.yml" :
                                             "dnn/halide_scheduler_enet.yml",
               2e-5, 0.15);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
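    // A scalefactor of 1/127.5 combined with a 127.5 mean maps 8-bit pixels
    // into [-1, 1], the preprocessing commonly used with this MobileNet-SSD model.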
    Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false);
    float diffScores = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 1.5e-2 : 0.0;
    float diffSquares = (target == DNN_TARGET_MYRIAD) ? 0.063 : 0.0;
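    // FLT_MIN keeps essentially every detection in the comparison; the MYRIAD
    // target needs a higher confidence cut-off.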
    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.252 : FLT_MIN;
    processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
               inp, "detection_out", "", diffScores, diffSquares, detectionConfThresh);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe_Different_Width_Height)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 560), Scalar(127.5, 127.5, 127.5), false);
    float diffScores  = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.029 : 0.0;
    float diffSquares = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.09  : 0.0;
    processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
               inp, "detection_out", "", diffScores, diffSquares);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v1_TensorFlow)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.095 : 0.0;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.09 : 0.0;
    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.216 : 0.2;
    processNet("dnn/ssd_mobilenet_v1_coco_2017_11_17.pb", "dnn/ssd_mobilenet_v1_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", l1, lInf, detectionConfThresh);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v1_TensorFlow_Different_Width_Height)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R2);
#endif

    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 560), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.012 : 0.0;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.06 : 0.0;
    processNet("dnn/ssd_mobilenet_v1_coco_2017_11_17.pb", "dnn/ssd_mobilenet_v1_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v2_TensorFlow)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);

    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.013 : 2e-5;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.062 : 0.0;
    processNet("dnn/ssd_mobilenet_v2_coco_2018_03_29.pb", "dnn/ssd_mobilenet_v2_coco_2018_03_29.pbtxt",
               inp, "detection_out", "", l1, lInf, 0.25);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, SSD_VGG16)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_VERYLONG);
    if (backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);  // TODO HALIDE_CPU
    double scoreThreshold = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.0325 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.032 : 0.0;
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel",
               "dnn/ssd_vgg16.prototxt", inp, "detection_out", "", scoreThreshold, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_coco)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_LONG);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif

    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0056 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.072 : 0.0;
    processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
               Size(46, 46), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_VERYLONG);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif

    // output range: [-0.001, 0.97]
    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.012 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD || target == DNN_TARGET_OPENCL_FP16) ? 0.16 : 0.0;
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
               Size(46, 46), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
{
    applyTestTag(CV_TEST_TAG_LONG, CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif

    // The same .caffemodel but modified .prototxt
    // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
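    // (the "faster_4_stages" variant reportedly keeps only the first 4
    // refinement stages of the MPI model, trading accuracy for speed)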
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt",
               Size(46, 46));
    expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, OpenFace)
{
#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#endif
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0024 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.0071 : 0.0;
    processNet("dnn/openface_nn4.small2.v1.t7", "", Size(96, 96), "", "", l1, lInf);

    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, opencv_face_detector)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat img = imread(findDataFile("gpu/lbpcascade/er.png"));
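    // The mean values (104, 177, 123) match the preprocessing of OpenCV's SSD
    // face detector; swapRB=false keeps the image in OpenCV's native BGR order.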
    Mat inp = blobFromImage(img, 1.0, Size(), Scalar(104.0, 177.0, 123.0), false, false);
    processNet("dnn/opencv_face_detector.caffemodel", "dnn/opencv_face_detector.prototxt",
               inp, "detection_out");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, Inception_v2_SSD_TensorFlow)
{
    applyTestTag(
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
        CV_TEST_TAG_DEBUG_LONG
    );
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019020000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R2);
#endif
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.015 : 0.0;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.0731 : 0.0;
    processNet("dnn/ssd_inception_v2_coco_2017_11_17.pb", "dnn/ssd_inception_v2_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, DenseNet_121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    // Reference output values are in range [-3.807, 4.605]
    float l1 = 0.0, lInf = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 2e-2; lInf = 9e-2;
    }
    else if (target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.1; lInf = 0.6;
    }
    processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", Size(224, 224), "", "", l1, lInf);
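    // The IE no-fallback check is relaxed on MYRIAD X, presumably because some
    // layers are known to fall back there; the CUDA check still applies.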
    if (target != DNN_TARGET_MYRIAD || getInferenceEngineVPUType() != CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        expectNoFallbacksFromIE(net);
    expectNoFallbacksFromCUDA(net);
}

TEST_P(DNNTestNetwork, FastNeuralStyle_eccv16)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB, CV_TEST_TAG_DEBUG_VERYLONG);

    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);

#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#endif

    Mat img = imread(findDataFile("dnn/googlenet_1.png"));
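    // Mean subtraction uses the usual Caffe/VGG BGR means (103.939, 116.779,
    // 123.68); swapRB=false keeps the BGR channel order.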
    Mat inp = blobFromImage(img, 1.0, Size(320, 240), Scalar(103.939, 116.779, 123.68), false, false);
    // Output image has values in range [-143.526, 148.539].
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.4 : 4e-5;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 7.45 : 2e-3;
    processNet("dnn/fast_neural_style_eccv16_starry_night.t7", "", inp, "", "", l1, lInf);
#if defined(HAVE_INF_ENGINE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    expectNoFallbacksFromIE(net);
#endif
    expectNoFallbacksFromCUDA(net);
}

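// The boolean flags choose which backends participate (see dnnBackendsAndTargets
// in test_common.hpp); with this combination the plain OpenCV/CPU pairs are most
// likely excluded, since DNN_BACKEND_OPENCV already serves as the reference
// inside processNet.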
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, dnnBackendsAndTargets(true, true, false, true, true));

}} // namespace