dnn(test): replace SkipTestException with tags
platform/upstream/opencv.git: modules/dnn/test/test_backends.cpp
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "test_precomp.hpp"
#include "opencv2/core/ocl.hpp"

namespace opencv_test { namespace {

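// Runs well-known models on both the reference DNN_BACKEND_OPENCV implementation and the
// tested backend/target pair, and checks that their outputs agree within given tolerances.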
class DNNTestNetwork : public DNNTestLayer
{
public:
    void processNet(const std::string& weights, const std::string& proto,
                    Size inpSize, const std::string& outputLayer = "",
                    const std::string& halideScheduler = "",
                    double l1 = 0.0, double lInf = 0.0)
    {
        // Create a common input blob.
        int blobSize[] = {1, 3, inpSize.height, inpSize.width};
        Mat inp(4, blobSize, CV_32FC1);
        randu(inp, 0.0f, 1.0f);

        processNet(weights, proto, inp, outputLayer, halideScheduler, l1, lInf);
    }

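    // Runs the network on the reference backend and on the tested backend/target, compares the
    // outputs, then repeats the comparison after flipping the input horizontally.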
    void processNet(std::string weights, std::string proto,
                    Mat inp, const std::string& outputLayer = "",
                    std::string halideScheduler = "",
                    double l1 = 0.0, double lInf = 0.0, double detectionConfThresh = 0.2)
    {
        checkBackend();
        l1 = l1 ? l1 : default_l1;
        lInf = lInf ? lInf : default_lInf;

        weights = findDataFile(weights, false);
        if (!proto.empty())
            proto = findDataFile(proto);

        // Create two networks: one with the default backend and target, and one with the tested configuration.
        Net netDefault = readNet(weights, proto);
        netDefault.setPreferableBackend(DNN_BACKEND_OPENCV);
        netDefault.setInput(inp);
        Mat outDefault = netDefault.forward(outputLayer).clone();

        net = readNet(weights, proto);
        net.setInput(inp);
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        if (backend == DNN_BACKEND_HALIDE && !halideScheduler.empty())
        {
            halideScheduler = findDataFile(halideScheduler);
            net.setHalideScheduler(halideScheduler);
        }
        Mat out = net.forward(outputLayer).clone();

        check(outDefault, out, outputLayer, l1, lInf, detectionConfThresh, "First run");

        // Test 2: change the input (flip each channel horizontally) and compare the outputs again.
        float* inpData = (float*)inp.data;
        for (int i = 0; i < inp.size[0] * inp.size[1]; ++i)
        {
            Mat slice(inp.size[2], inp.size[3], CV_32F, inpData);
            cv::flip(slice, slice, 1);
            inpData += slice.total();
        }
        netDefault.setInput(inp);
        net.setInput(inp);
        outDefault = netDefault.forward(outputLayer).clone();
        out = net.forward(outputLayer).clone();
        check(outDefault, out, outputLayer, l1, lInf, detectionConfThresh, "Second run");
    }

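    // Compares the reference output with the tested backend's output. For "detection_out"
    // layers detections are compared with normAssertDetections(); otherwise normAssert() is used.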
    void check(Mat& ref, Mat& out, const std::string& outputLayer, double l1, double lInf,
               double detectionConfThresh, const char* msg)
    {
        if (outputLayer == "detection_out")
        {
            if (backend == DNN_BACKEND_INFERENCE_ENGINE)
            {
                // Inference Engine produces detections terminated by a row that starts with -1.
                out = out.reshape(1, out.total() / 7);
                int numDetections = 0;
                while (numDetections < out.rows && out.at<float>(numDetections, 0) != -1)
                {
                    numDetections += 1;
                }
                out = out.rowRange(0, numDetections);
            }
            normAssertDetections(ref, out, msg, detectionConfThresh, l1, lInf);
        }
        else
            normAssert(ref, out, msg, l1, lInf);
    }

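    // Network configured for the tested backend/target; tests inspect it afterwards,
    // e.g. via expectNoFallbacksFromIE().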
    Net net;
};

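// The tests below use memory/duration tags and backend-specific skip tags (rather than
// throwing SkipTestException) together with per-target accuracy thresholds.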
TEST_P(DNNTestNetwork, AlexNet)
{
    applyTestTag(CV_TEST_TAG_MEMORY_1GB);
    processNet("dnn/bvlc_alexnet.caffemodel", "dnn/bvlc_alexnet.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_alexnet.yml" :
                                             "dnn/halide_scheduler_alexnet.yml");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, ResNet_50)
{
    applyTestTag(
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
        CV_TEST_TAG_DEBUG_LONG
    );
    processNet("dnn/ResNet-50-model.caffemodel", "dnn/ResNet-50-deploy.prototxt",
               Size(224, 224), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_resnet_50.yml" :
                                             "dnn/halide_scheduler_resnet_50.yml");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, SqueezeNet_v1_1)
{
    processNet("dnn/squeezenet_v1.1.caffemodel", "dnn/squeezenet_v1.1.prototxt",
               Size(227, 227), "prob",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_squeezenet_v1_1.yml" :
                                             "dnn/halide_scheduler_squeezenet_v1_1.yml");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, GoogLeNet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    processNet("dnn/bvlc_googlenet.caffemodel", "dnn/bvlc_googlenet.prototxt",
               Size(224, 224), "prob");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, Inception_5h)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    double l1 = default_l1, lInf = default_lInf;
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL))
    {
        l1 = 1.72e-5;
        lInf = 8e-4;
    }
    processNet("dnn/tensorflow_inception_graph.pb", "", Size(224, 224), "softmax2",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_inception_5h.yml" :
                                             "dnn/halide_scheduler_inception_5h.yml",
               l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, ENet)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
    processNet("dnn/Enet-model-best.net", "", Size(512, 512), "l367_Deconvolution",
               target == DNN_TARGET_OPENCL ? "dnn/halide_scheduler_opencl_enet.yml" :
                                             "dnn/halide_scheduler_enet.yml",
               2e-5, 0.15);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 300), Scalar(127.5, 127.5, 127.5), false);
    float diffScores = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 1.5e-2 : 0.0;
    float diffSquares = (target == DNN_TARGET_MYRIAD) ? 0.063  : 0.0;
    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.252  : FLT_MIN;
    processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
               inp, "detection_out", "", diffScores, diffSquares, detectionConfThresh);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_Caffe_Different_Width_Height)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f / 127.5, Size(300, 560), Scalar(127.5, 127.5, 127.5), false);
    float diffScores  = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.029 : 0.0;
    float diffSquares = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.09  : 0.0;
    processNet("dnn/MobileNetSSD_deploy.caffemodel", "dnn/MobileNetSSD_deploy.prototxt",
                inp, "detection_out", "", diffScores, diffSquares);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v1_TensorFlow)
{
    applyTestTag(target == DNN_TARGET_CPU ? "" : CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.095 : 0.0;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.09 : 0.0;
    float detectionConfThresh = (target == DNN_TARGET_MYRIAD) ? 0.216 : 0.2;
    processNet("dnn/ssd_mobilenet_v1_coco_2017_11_17.pb", "dnn/ssd_mobilenet_v1_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", l1, lInf, detectionConfThresh);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v1_TensorFlow_Different_Width_Height)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 560), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.012 : 0.0;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.06 : 0.0;
    processNet("dnn/ssd_mobilenet_v1_coco_2017_11_17.pb", "dnn/ssd_mobilenet_v1_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, MobileNet_SSD_v2_TensorFlow)
{
    applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.013 : 2e-5;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.062 : 0.0;
    processNet("dnn/ssd_mobilenet_v2_coco_2018_03_29.pb", "dnn/ssd_mobilenet_v2_coco_2018_03_29.pbtxt",
               inp, "detection_out", "", l1, lInf, 0.25);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, SSD_VGG16)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_VERYLONG);
    if (backend == DNN_BACKEND_HALIDE && target == DNN_TARGET_CPU)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);  // TODO HALIDE_CPU
    double scoreThreshold = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.0325 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.032 : 0.0;
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    processNet("dnn/VGG_ILSVRC2016_SSD_300x300_iter_440000.caffemodel",
               "dnn/ssd_vgg16.prototxt", inp, "detection_out", "", scoreThreshold, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_coco)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_LONG);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif

    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0056 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.072 : 0.0;
    processNet("dnn/openpose_pose_coco.caffemodel", "dnn/openpose_pose_coco.prototxt",
               Size(46, 46), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_mpi)
{
    applyTestTag(CV_TEST_TAG_LONG, (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_1GB : CV_TEST_TAG_MEMORY_2GB),
                 CV_TEST_TAG_DEBUG_VERYLONG);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif

    // output range: [-0.001, 0.97]
    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.012 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD || target == DNN_TARGET_OPENCL_FP16) ? 0.16 : 0.0;
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi.prototxt",
               Size(46, 46), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, OpenPose_pose_mpi_faster_4_stages)
{
    applyTestTag(CV_TEST_TAG_LONG, CV_TEST_TAG_MEMORY_1GB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif

    // The same .caffemodel but modified .prototxt
    // See https://github.com/CMU-Perceptual-Computing-Lab/openpose/blob/master/src/openpose/pose/poseParameters.cpp
    processNet("dnn/openpose_pose_mpi.caffemodel", "dnn/openpose_pose_mpi_faster_4_stages.prototxt",
               Size(46, 46));
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, OpenFace)
{
#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_EQ(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#endif
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    const float l1 = (target == DNN_TARGET_MYRIAD) ? 0.0024 : 0.0;
    const float lInf = (target == DNN_TARGET_MYRIAD) ? 0.0071 : 0.0;
    processNet("dnn/openface_nn4.small2.v1.t7", "", Size(96, 96), "", "", l1, lInf);
}

TEST_P(DNNTestNetwork, opencv_face_detector)
{
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat img = imread(findDataFile("gpu/lbpcascade/er.png"));
    Mat inp = blobFromImage(img, 1.0, Size(), Scalar(104.0, 177.0, 123.0), false, false);
    processNet("dnn/opencv_face_detector.caffemodel", "dnn/opencv_face_detector.prototxt",
               inp, "detection_out");
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, Inception_v2_SSD_TensorFlow)
{
    applyTestTag(
        (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
        CV_TEST_TAG_DEBUG_LONG
    );
#if defined(INF_ENGINE_RELEASE)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
            && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    Mat sample = imread(findDataFile("dnn/street.png"));
    Mat inp = blobFromImage(sample, 1.0f, Size(300, 300), Scalar(), false);
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.015 : 0.0;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.0731 : 0.0;
    processNet("dnn/ssd_inception_v2_coco_2017_11_17.pb", "dnn/ssd_inception_v2_coco_2017_11_17.pbtxt",
               inp, "detection_out", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, DenseNet_121)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB);
    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    // Reference output values are in range [-3.807, 4.605]
    float l1 = 0.0, lInf = 0.0;
    if (target == DNN_TARGET_OPENCL_FP16)
    {
        l1 = 9e-3; lInf = 5e-2;
    }
    else if (target == DNN_TARGET_MYRIAD)
    {
        l1 = 0.1; lInf = 0.6;
    }
    processNet("dnn/DenseNet_121.caffemodel", "dnn/DenseNet_121.prototxt", Size(224, 224), "", "", l1, lInf);
    expectNoFallbacksFromIE(net);
}

TEST_P(DNNTestNetwork, FastNeuralStyle_eccv16)
{
    applyTestTag(CV_TEST_TAG_MEMORY_512MB, CV_TEST_TAG_DEBUG_VERYLONG);

    if (backend == DNN_BACKEND_HALIDE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_HALIDE);
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);

#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_VER_MAJOR_LE(2018050000)
    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#endif

    Mat img = imread(findDataFile("dnn/googlenet_1.png"));
    Mat inp = blobFromImage(img, 1.0, Size(320, 240), Scalar(103.939, 116.779, 123.68), false, false);
    // Output image has values in range [-143.526, 148.539].
    float l1 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.4 : 4e-5;
    float lInf = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 7.45 : 2e-3;
    processNet("dnn/fast_neural_style_eccv16_starry_night.t7", "", inp, "", "", l1, lInf);
#if defined(HAVE_INF_ENGINE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
    expectNoFallbacksFromIE(net);
#endif
}

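// Run every test above for each backend/target combination provided by dnnBackendsAndTargets().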
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, dnnBackendsAndTargets(true, true, false));

}} // namespace