// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2018-2019, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
9 #include "test_precomp.hpp"
10 #include "npy_blob.hpp"
11 #include <opencv2/dnn/shape_utils.hpp>
12 namespace opencv_test { namespace {
14 template<typename TString>
15 static std::string _tf(TString filename, bool required = true)
17 return findDataFile(std::string("dnn/onnx/") + filename, required);
20 class Test_ONNX_layers : public DNNTestLayer
25 Test_ONNX_layers() : required(true) { }
33 void testONNXModels(const String& basename, const Extension ext = npy,
34 const double l1 = 0, const float lInf = 0, const bool useSoftmax = false,
35 bool checkNoFallbacks = true, int numInps = 1)
37 String onnxmodel = _tf("models/" + basename + ".onnx", required);
38 std::vector<Mat> inps(numInps);
41 for (int i = 0; i < numInps; ++i)
42 inps[i] = blobFromNPY(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".npy"));
43 ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
46 for (int i = 0; i < numInps; ++i)
47 inps[i] = readTensorFromONNX(_tf("data/input_" + basename + (numInps > 1 ? format("_%d", i) : "") + ".pb"));
48 ref = readTensorFromONNX(_tf("data/output_" + basename + ".pb"));
51 CV_Error(Error::StsUnsupportedFormat, "Unsupported extension");
53 checkBackend(&inps[0], &ref);
54 Net net = readNetFromONNX(onnxmodel);
55 ASSERT_FALSE(net.empty());
57 net.setPreferableBackend(backend);
58 net.setPreferableTarget(target);
60 std::vector<String> inputNames;
61 for (int i = 0; i < numInps; ++i)
62 inputNames.push_back(format("%d", i));
63 net.setInputsNames(inputNames);
65 for (int i = 0; i < numInps; ++i)
66 net.setInput(inps[i], inputNames[i]);
67 Mat out = net.forward("");
73 netSoftmax.addLayerToPrev("softmaxLayer", "Softmax", lp);
74 netSoftmax.setPreferableBackend(DNN_BACKEND_OPENCV);
76 netSoftmax.setInput(out);
77 out = netSoftmax.forward();
79 netSoftmax.setInput(ref);
80 ref = netSoftmax.forward();
82 normAssert(ref, out, "", l1 ? l1 : default_l1, lInf ? lInf : default_lInf);
84 expectNoFallbacksFromIE(net);
88 TEST_P(Test_ONNX_layers, InstanceNorm)
90 if(backend == DNN_BACKEND_CUDA)
91 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); /* MVN is not supported */
93 if (target == DNN_TARGET_MYRIAD)
94 testONNXModels("instancenorm", npy, 0, 0, false, false);
96 testONNXModels("instancenorm", npy);
99 TEST_P(Test_ONNX_layers, MaxPooling)
101 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2020020000)
102 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_MYRIAD)
103 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
105 testONNXModels("maxpooling", npy, 0, 0, false, false);
107 TEST_P(Test_ONNX_layers, MaxPooling_2)
109 testONNXModels("two_maxpooling", npy, 0, 0, false, false);
112 TEST_P(Test_ONNX_layers, Convolution)
114 testONNXModels("convolution");
117 TEST_P(Test_ONNX_layers, Convolution_variable_weight)
119 if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
120 backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
121 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
123 if (backend == DNN_BACKEND_CUDA)
124 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // not supported
125 if (backend == DNN_BACKEND_VKCOM)
126 applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN); // not supported
127 String basename = "conv_variable_w";
128 Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
129 ASSERT_FALSE(net.empty());
131 net.setPreferableBackend(backend);
132 net.setPreferableTarget(target);
134 for (int i = 0; i < 2; i++)
136 Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
137 Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
138 Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));
140 net.setInput(input, "0");
141 net.setInput(weights, "1");
143 Mat out = net.forward();
144 normAssert(ref, out, "", default_l1, default_lInf);
148 TEST_P(Test_ONNX_layers, Convolution_variable_weight_bias)
150 if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH ||
151 backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) && target == DNN_TARGET_MYRIAD)
152 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
154 if (backend == DNN_BACKEND_CUDA)
155 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // not supported
156 if (backend == DNN_BACKEND_VKCOM)
157 applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN); // not supported
159 String basename = "conv_variable_wb";
160 Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
161 ASSERT_FALSE(net.empty());
163 net.setPreferableBackend(backend);
164 net.setPreferableTarget(target);
166 for (int i = 0; i < 2; i++)
168 Mat input = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_0.npy"));
169 Mat weights = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_1.npy"));
170 Mat bias = blobFromNPY(_tf("data/input_" + basename + format("_%d", i) + "_2.npy"));
171 Mat ref = blobFromNPY(_tf("data/output_" + basename + format("_%d", i) + ".npy"));
173 net.setInput(input, "0");
174 net.setInput(weights, "1");
175 net.setInput(bias, "bias");
177 Mat out = net.forward();
178 normAssert(ref, out, "", default_l1, default_lInf);
182 TEST_P(Test_ONNX_layers, Gather)
184 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
185 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
186 testONNXModels("gather");
187 // GPU plugin unsupported slice for constant
188 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
189 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
190 testONNXModels("gather_scalar", npy, 0, 0, false, false);
193 TEST_P(Test_ONNX_layers, Convolution3D)
195 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
196 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
198 testONNXModels("conv3d");
201 TEST_P(Test_ONNX_layers, Convolution3D_bias)
203 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
204 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
206 testONNXModels("conv3d_bias");
209 TEST_P(Test_ONNX_layers, Two_convolution)
211 #if defined(INF_ENGINE_RELEASE)
212 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
213 && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
215 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
217 // Reference output values are in range [-0.855, 0.611]
218 testONNXModels("two_convolution");
221 TEST_P(Test_ONNX_layers, Deconvolution)
223 testONNXModels("deconvolution", npy, 0, 0, false, false);
224 testONNXModels("two_deconvolution", npy, 0, 0, false, false);
225 testONNXModels("deconvolution_group", npy, 0, 0, false, false);
226 testONNXModels("deconvolution_output_shape", npy, 0, 0, false, false);
227 if (target != DNN_TARGET_CUDA_FP16) // bug
228 testONNXModels("deconv_adjpad_2d", npy, 0, 0, false, false);
231 TEST_P(Test_ONNX_layers, Deconvolution3D)
233 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
234 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
236 if (backend == DNN_BACKEND_CUDA)
240 else if (backend == DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
241 throw SkipTestException("Only DLIE backend on CPU is supported");
242 testONNXModels("deconv3d");
243 testONNXModels("deconv3d_bias");
244 testONNXModels("deconv3d_pad");
245 testONNXModels("deconv3d_adjpad");
248 TEST_P(Test_ONNX_layers, Dropout)
250 testONNXModels("dropout");
253 TEST_P(Test_ONNX_layers, Linear)
255 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
256 applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
257 testONNXModels("linear");
260 TEST_P(Test_ONNX_layers, ReLU)
262 testONNXModels("ReLU");
265 TEST_P(Test_ONNX_layers, Clip)
267 testONNXModels("clip", npy);
270 TEST_P(Test_ONNX_layers, Shape)
272 testONNXModels("shape_of_constant");
275 TEST_P(Test_ONNX_layers, ReduceMean)
277 testONNXModels("reduce_mean");
278 testONNXModels("reduce_mean_axis1");
279 testONNXModels("reduce_mean_axis2");
282 TEST_P(Test_ONNX_layers, ReduceSum)
284 testONNXModels("reduce_sum");
287 TEST_P(Test_ONNX_layers, ReduceMax)
289 testONNXModels("reduce_max");
290 testONNXModels("reduce_max_axis_0");
291 testONNXModels("reduce_max_axis_1");
294 TEST_P(Test_ONNX_layers, Scale)
296 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
297 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
298 testONNXModels("scale");
301 TEST_P(Test_ONNX_layers, ReduceMean3D)
303 if (backend == DNN_BACKEND_CUDA)
307 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
308 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
309 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
310 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
311 else if (target != DNN_TARGET_CPU)
312 throw SkipTestException("Only CPU is supported");
314 testONNXModels("reduce_mean3d");
317 TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
319 testONNXModels("maxpooling_sigmoid");
322 TEST_P(Test_ONNX_layers, Cast)
324 testONNXModels("cast");
327 TEST_P(Test_ONNX_layers, Power)
329 testONNXModels("pow2", npy, 0, 0, false, false);
332 TEST_P(Test_ONNX_layers, Concatenation)
334 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
336 if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
337 if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
338 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
340 testONNXModels("concatenation");
343 TEST_P(Test_ONNX_layers, Eltwise3D)
345 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
346 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
348 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
349 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
350 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
351 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
352 testONNXModels("eltwise3d");
355 TEST_P(Test_ONNX_layers, AveragePooling)
357 testONNXModels("average_pooling");
360 TEST_P(Test_ONNX_layers, MaxPooling3D)
362 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
363 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
365 if (backend == DNN_BACKEND_CUDA)
369 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
370 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
371 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
372 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
373 else if (target != DNN_TARGET_CPU)
374 throw SkipTestException("Only CPU is supported");
375 testONNXModels("max_pool3d", npy, 0, 0, false, false);
378 TEST_P(Test_ONNX_layers, AvePooling3D)
380 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
381 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
383 if (backend == DNN_BACKEND_CUDA)
387 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
388 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
389 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
390 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
391 else if (target != DNN_TARGET_CPU)
392 throw SkipTestException("Only CPU is supported");
393 testONNXModels("ave_pool3d");
396 TEST_P(Test_ONNX_layers, PoolConv3D)
398 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
399 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
401 if (backend == DNN_BACKEND_CUDA)
405 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
406 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
407 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
408 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
409 else if (target != DNN_TARGET_CPU)
410 throw SkipTestException("Only CPU is supported");
411 testONNXModels("pool_conv_3d");
414 TEST_P(Test_ONNX_layers, BatchNormalization)
416 testONNXModels("batch_norm");
419 TEST_P(Test_ONNX_layers, BatchNormalization3D)
421 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
423 if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
424 if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
425 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
427 testONNXModels("batch_norm_3d");
430 TEST_P(Test_ONNX_layers, BatchNormalizationUnfused)
432 testONNXModels("frozenBatchNorm2d");
435 TEST_P(Test_ONNX_layers, BatchNormalizationSubgraph)
437 testONNXModels("batch_norm_subgraph");
440 TEST_P(Test_ONNX_layers, Transpose)
442 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
444 if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
445 if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
446 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
448 testONNXModels("transpose");
451 TEST_P(Test_ONNX_layers, Multiplication)
453 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
454 applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
455 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
456 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
457 testONNXModels("mul");
460 TEST_P(Test_ONNX_layers, MatMul)
462 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
463 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
464 if (backend == DNN_BACKEND_CUDA)
465 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // not supported
467 testONNXModels("matmul_2d");
468 testONNXModels("matmul_3d");
469 testONNXModels("matmul_4d");
472 TEST_P(Test_ONNX_layers, MatMulAdd)
474 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
475 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
476 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
477 applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
478 testONNXModels("matmul_add");
481 TEST_P(Test_ONNX_layers, Expand)
483 testONNXModels("expand_batch");
484 testONNXModels("expand_channels");
485 testONNXModels("expand_neg_batch");
488 TEST_P(Test_ONNX_layers, ExpandHW)
490 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
491 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
492 testONNXModels("expand_hw");
495 TEST_P(Test_ONNX_layers, Constant)
497 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
498 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
499 && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
500 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
502 testONNXModels("constant");
505 TEST_P(Test_ONNX_layers, Padding)
507 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
508 testONNXModels("padding", npy, 0, 0, false, false);
510 testONNXModels("padding");
514 TEST_P(Test_ONNX_layers, Resize)
516 testONNXModels("resize_nearest");
517 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
518 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
519 testONNXModels("resize_bilinear");
522 TEST_P(Test_ONNX_layers, ResizeUnfused)
524 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
525 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
526 testONNXModels("upsample_unfused_torch1.2");
527 testONNXModels("upsample_unfused_opset9_torch1.4");
528 testONNXModels("resize_nearest_unfused_opset11_torch1.4");
529 testONNXModels("resize_nearest_unfused_opset11_torch1.3");
530 testONNXModels("resize_bilinear_unfused_opset11_torch1.4");
533 TEST_P(Test_ONNX_layers, ResizeUnfusedTwoInputs)
535 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
536 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
537 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
538 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
539 testONNXModels("upsample_unfused_two_inputs_opset9_torch1.4", npy, 0, 0, false, true, 2);
540 testONNXModels("upsample_unfused_two_inputs_opset11_torch1.4", npy, 0, 0, false, true, 2);
543 TEST_P(Test_ONNX_layers, MultyInputs)
545 testONNXModels("multy_inputs", npy, 0, 0, false, true, 2);
548 TEST_P(Test_ONNX_layers, Broadcast)
550 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
551 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
552 testONNXModels("channel_broadcast", npy, 0, 0, false, true, 2);
555 TEST_P(Test_ONNX_layers, DynamicResize)
557 testONNXModels("dynamic_resize_9", npy, 0, 0, false, true, 2);
558 testONNXModels("dynamic_resize_10", npy, 0, 0, false, true, 2);
559 testONNXModels("dynamic_resize_11", npy, 0, 0, false, true, 2);
560 testONNXModels("dynamic_resize_scale_9", npy, 0, 0, false, true, 2);
561 testONNXModels("dynamic_resize_scale_10", npy, 0, 0, false, true, 2);
562 testONNXModels("dynamic_resize_scale_11", npy, 0, 0, false, true, 2);
565 TEST_P(Test_ONNX_layers, Div)
567 const String model = _tf("models/div.onnx");
568 Net net = readNetFromONNX(model);
569 ASSERT_FALSE(net.empty());
571 net.setPreferableBackend(backend);
572 net.setPreferableTarget(target);
574 // Reference output values range is -68.80928, 2.991873. So to avoid computational
575 // difference for FP16 we'll perform reversed division (just swap inputs).
576 Mat inp1 = blobFromNPY(_tf("data/input_div_1.npy"));
577 Mat inp2 = blobFromNPY(_tf("data/input_div_0.npy"));
578 Mat ref = blobFromNPY(_tf("data/output_div.npy"));
579 cv::divide(1.0, ref, ref);
580 checkBackend(&inp1, &ref);
582 net.setInput(inp1, "0");
583 net.setInput(inp2, "1");
584 Mat out = net.forward();
586 normAssert(ref, out, "", default_l1, default_lInf);
587 expectNoFallbacksFromIE(net);
588 expectNoFallbacksFromCUDA(net);
591 TEST_P(Test_ONNX_layers, DynamicReshape)
593 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
594 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
596 testONNXModels("dynamic_reshape");
597 testONNXModels("dynamic_reshape_opset_11");
598 testONNXModels("flatten_by_prod");
599 testONNXModels("flatten_const");
602 TEST_P(Test_ONNX_layers, Reshape)
604 testONNXModels("unsqueeze");
607 TEST_P(Test_ONNX_layers, Squeeze)
609 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
610 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
611 testONNXModels("squeeze");
614 TEST_P(Test_ONNX_layers, ReduceL2)
616 testONNXModels("reduceL2");
617 testONNXModels("reduceL2_subgraph");
618 testONNXModels("reduceL2_subgraph_2");
621 TEST_P(Test_ONNX_layers, Split)
623 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
624 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
625 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
626 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
627 testONNXModels("split_1");
628 testONNXModels("split_2");
629 testONNXModels("split_3");
630 testONNXModels("split_4");
633 TEST_P(Test_ONNX_layers, Slice)
635 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
636 testONNXModels("slice", npy, 0, 0, false, false);
638 testONNXModels("slice");
639 testONNXModels("slice_opset_11");
643 TEST_P(Test_ONNX_layers, Softmax)
645 testONNXModels("softmax");
646 testONNXModels("log_softmax", npy, 0, 0, false, false);
647 testONNXModels("softmax_unfused");
650 TEST_P(Test_ONNX_layers, Split_EltwiseMax)
652 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
653 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
654 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
655 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
656 testONNXModels("split_max");
659 TEST_P(Test_ONNX_layers, LSTM)
661 testONNXModels("lstm", npy, 0, 0, false, false);
664 TEST_P(Test_ONNX_layers, LSTM_bidirectional)
666 testONNXModels("lstm_bidirectional", npy, 0, 0, false, false);
669 TEST_P(Test_ONNX_layers, Pad2d_Unfused)
671 testONNXModels("ReflectionPad2d");
672 testONNXModels("ZeroPad2d");
675 TEST_P(Test_ONNX_layers, LinearWithConstant)
677 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
678 applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
679 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2020040000)
680 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
682 if (backend == DNN_BACKEND_CUDA)
683 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
684 testONNXModels("lin_with_constant");
687 TEST_P(Test_ONNX_layers, MatmulWithTwoInputs)
689 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
690 applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
691 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2020040000)
692 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
694 if (backend == DNN_BACKEND_CUDA)
695 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA);
696 testONNXModels("matmul_with_two_inputs");
699 TEST_P(Test_ONNX_layers, ResizeOpset11_Torch1_6)
701 testONNXModels("resize_opset11_torch1.6");
704 TEST_P(Test_ONNX_layers, Mish)
706 testONNXModels("mish");
709 TEST_P(Test_ONNX_layers, CalculatePads)
711 testONNXModels("calc_pads");
714 TEST_P(Test_ONNX_layers, Conv1d)
716 testONNXModels("conv1d");
719 TEST_P(Test_ONNX_layers, Conv1d_bias)
721 testONNXModels("conv1d_bias");
724 TEST_P(Test_ONNX_layers, Conv1d_variable_weight)
726 if (backend == DNN_BACKEND_CUDA)
727 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // not supported
728 if (backend == DNN_BACKEND_VKCOM)
729 applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN); // not supported
730 String basename = "conv1d_variable_w";
731 Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
732 ASSERT_FALSE(net.empty());
734 net.setPreferableBackend(backend);
735 net.setPreferableTarget(target);
737 Mat input = blobFromNPY(_tf("data/input_" + basename + "_0.npy"));
738 Mat weights = blobFromNPY(_tf("data/input_" + basename + "_1.npy"));
739 Mat ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
741 net.setInput(input, "0");
742 net.setInput(weights, "1");
744 Mat out = net.forward();
745 normAssert(ref, out, "", default_l1, default_lInf);
748 TEST_P(Test_ONNX_layers, Conv1d_variable_weight_bias)
750 if (backend == DNN_BACKEND_CUDA)
751 applyTestTag(CV_TEST_TAG_DNN_SKIP_CUDA); // not supported
752 if (backend == DNN_BACKEND_VKCOM)
753 applyTestTag(CV_TEST_TAG_DNN_SKIP_VULKAN); // not supported
754 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
756 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
758 String basename = "conv1d_variable_wb";
759 Net net = readNetFromONNX(_tf("models/" + basename + ".onnx"));
760 ASSERT_FALSE(net.empty());
762 net.setPreferableBackend(backend);
763 net.setPreferableTarget(target);
765 Mat input = blobFromNPY(_tf("data/input_" + basename + "_0.npy"));
766 Mat weights = blobFromNPY(_tf("data/input_" + basename + "_1.npy"));
767 Mat bias = blobFromNPY(_tf("data/input_" + basename + "_2.npy"));
768 Mat ref = blobFromNPY(_tf("data/output_" + basename + ".npy"));
770 net.setInput(input, "0");
771 net.setInput(weights, "1");
772 net.setInput(bias, "bias");
774 Mat out = net.forward();
775 normAssert(ref, out, "", default_l1, default_lInf);
778 TEST_P(Test_ONNX_layers, GatherMultiOutput)
780 #if defined(INF_ENGINE_RELEASE)
781 if (target == DNN_TARGET_MYRIAD)
782 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE);
785 testONNXModels("gather_multi_output");
788 TEST_P(Test_ONNX_layers, DynamicAxes)
790 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
792 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
794 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
796 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
798 testONNXModels("squeeze_and_conv_dynamic_axes");
799 testONNXModels("unsqueeze_and_conv_dynamic_axes");
800 testONNXModels("gather_dynamic_axes");
801 testONNXModels("gather_scalar_dynamic_axes");
802 testONNXModels("slice_dynamic_axes");
803 testONNXModels("slice_opset_11_dynamic_axes");
804 testONNXModels("resize_opset11_torch1.6_dynamic_axes");
805 testONNXModels("average_pooling_dynamic_axes");
806 testONNXModels("maxpooling_sigmoid_dynamic_axes");
809 TEST_P(Test_ONNX_layers, MaxPool1d)
811 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
813 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
815 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
817 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
819 testONNXModels("maxpooling_1d");
822 TEST_P(Test_ONNX_layers, MaxPoolSigmoid1d)
824 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
826 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
828 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
830 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
832 testONNXModels("maxpooling_sigmoid_1d");
835 TEST_P(Test_ONNX_layers, MaxPool1d_Twise)
837 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
839 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
841 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
843 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
845 testONNXModels("two_maxpooling_1d");
848 TEST_P(Test_ONNX_layers, AvePool1d)
850 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
852 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
854 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
856 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
858 testONNXModels("average_pooling_1d");
861 TEST_P(Test_ONNX_layers, PoolConv1d)
863 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
865 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
867 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
869 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
871 testONNXModels("pool_conv_1d");
874 TEST_P(Test_ONNX_layers, ConvResizePool1d)
876 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
878 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
880 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
882 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
884 testONNXModels("conv_resize_pool_1d");
887 INSTANTIATE_TEST_CASE_P(/*nothing*/, Test_ONNX_layers, dnnBackendsAndTargets());
889 class Test_ONNX_nets : public Test_ONNX_layers
892 Test_ONNX_nets() { required = false; }
895 TEST_P(Test_ONNX_nets, Alexnet)
897 #if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
898 applyTestTag(CV_TEST_TAG_MEMORY_2GB);
900 applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
903 const String model = _tf("models/alexnet.onnx", false);
905 Net net = readNetFromONNX(model);
906 ASSERT_FALSE(net.empty());
908 net.setPreferableBackend(backend);
909 net.setPreferableTarget(target);
911 Mat inp = imread(_tf("../grace_hopper_227.png"));
912 Mat ref = blobFromNPY(_tf("../caffe_alexnet_prob.npy"));
913 checkBackend(&inp, &ref);
915 net.setInput(blobFromImage(inp, 1.0f, Size(227, 227), Scalar(), false));
916 ASSERT_FALSE(net.empty());
917 Mat out = net.forward();
919 normAssert(out, ref, "", default_l1, default_lInf);
920 expectNoFallbacksFromIE(net);
923 TEST_P(Test_ONNX_nets, Squeezenet)
925 testONNXModels("squeezenet", pb);
928 TEST_P(Test_ONNX_nets, Googlenet)
930 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
931 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
933 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
934 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
936 const String model = _tf("models/googlenet.onnx", false);
938 Net net = readNetFromONNX(model);
939 ASSERT_FALSE(net.empty());
941 net.setPreferableBackend(backend);
942 net.setPreferableTarget(target);
944 std::vector<Mat> images;
945 images.push_back( imread(_tf("../googlenet_0.png")) );
946 images.push_back( imread(_tf("../googlenet_1.png")) );
947 Mat inp = blobFromImages(images, 1.0f, Size(), Scalar(), false);
948 Mat ref = blobFromNPY(_tf("../googlenet_prob.npy"));
949 checkBackend(&inp, &ref);
952 ASSERT_FALSE(net.empty());
953 Mat out = net.forward();
955 normAssert(ref, out, "", default_l1, default_lInf);
956 expectNoFallbacksFromIE(net);
959 TEST_P(Test_ONNX_nets, CaffeNet)
961 #if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
962 applyTestTag(CV_TEST_TAG_MEMORY_2GB);
964 applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
967 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
968 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
969 && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
970 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
972 testONNXModels("caffenet", pb);
975 TEST_P(Test_ONNX_nets, RCNN_ILSVRC13)
977 #if defined(OPENCV_32BIT_CONFIGURATION) && (defined(HAVE_OPENCL) || defined(_WIN32))
978 applyTestTag(CV_TEST_TAG_MEMORY_2GB);
980 applyTestTag(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
983 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019030000)
984 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD
985 && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
986 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
988 // Reference output values are in range [-4.992, -1.161]
989 testONNXModels("rcnn_ilsvrc13", pb, 0.0046);
// VGG16 with batch normalization; very large model, hence the 6GB memory tag.
992 TEST_P(Test_ONNX_nets, VGG16_bn)
994 applyTestTag(CV_TEST_TAG_MEMORY_6GB); // > 2.3Gb
996 // output range: [-16; 27], after Softmax [0; 0.67]
// Myriad needs a looser lInf; last arg appends a Softmax before comparison.
997 const double lInf = (target == DNN_TARGET_MYRIAD) ? 0.038 : default_lInf;
998 testONNXModels("vgg16-bn", pb, default_l1, lInf, true);
// ZFNet-512 classification model with default tolerances.
1001 TEST_P(Test_ONNX_nets, ZFNet)
1003 applyTestTag(CV_TEST_TAG_MEMORY_2GB);
1004 testONNXModels("zfnet512", pb);
// ResNet-18 v1; compared after an appended Softmax (5th arg = true).
1007 TEST_P(Test_ONNX_nets, ResNet18v1)
1009 applyTestTag(CV_TEST_TAG_MEMORY_512MB);
1011 // output range: [-16; 22], after Softmax [0, 0.51]
// Last arg: skip the no-fallback check on Myriad targets.
1012 testONNXModels("resnet18v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
// ResNet-50 v1; compared after an appended Softmax (5th arg = true).
1015 TEST_P(Test_ONNX_nets, ResNet50v1)
1017 applyTestTag(CV_TEST_TAG_MEMORY_512MB);
1019 // output range: [-67; 75], after Softmax [0, 0.98]
// Last arg: skip the no-fallback check on Myriad targets.
1020 testONNXModels("resnet50v1", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
// ResNet-101 DUC/HDC segmentation model; very long-running test.
1023 TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
1025 applyTestTag(CV_TEST_TAG_VERYLONG);
// Skip entirely on the IE NN Builder backend (2019R1+).
// NOTE(review): matching braces and #endif lines are elided in this excerpt.
1027 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
1028 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
1029 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER, CV_TEST_TAG_DNN_SKIP_IE_VERSION);
1031 #if defined(INF_ENGINE_RELEASE)
1032 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
1033 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
// OpenCL targets are disabled for any backend; tag only when backend is OpenCV.
1035 if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
1037 if (backend == DNN_BACKEND_OPENCV)
1038 applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
1039 throw SkipTestException("Test is disabled for OpenCL targets");
1041 testONNXModels("resnet101_duc_hdc", pb);
// Tiny YOLO v2 detection model with per-target tolerance adjustments.
1044 TEST_P(Test_ONNX_nets, TinyYolov2)
1046 applyTestTag(CV_TEST_TAG_MEMORY_512MB);
// Honour the global "skip unstable tests" switch.
1048 if (cvtest::skipUnstableTests)
1049 throw SkipTestException("Skip unstable test");
// Known IE failures: NN Builder on OpenCL targets, and Myriad X on either IE backend.
// NOTE(review): matching braces and #endif lines are elided in this excerpt.
1050 #if defined(INF_ENGINE_RELEASE)
1051 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019
1052 && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
1054 applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1056 if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
1058 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
1059 backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
1060 CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
1061 CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
1064 // output range: [-11; 8]
1065 double l1 = default_l1, lInf = default_lInf;
// NOTE(review): the l1/lInf assignments for the two FP16-ish branches below are
// elided in this excerpt — confirm against the full file.
1066 if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
1071 else if (target == DNN_TARGET_CUDA_FP16)
// IE 2020.4 nGraph + OpenCL_FP16 needs further-relaxed tolerances.
1076 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
1077 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
1079 l1 = 0.018f; lInf = 0.16f;
1083 testONNXModels("tiny_yolo2", pb, l1, lInf);
// Small MNIST CNN; compared after an appended Softmax (5th arg = true).
1086 TEST_P(Test_ONNX_nets, CNN_MNIST)
1088 // output range: [-1952; 6574], after Softmax [0; 1]
1089 testONNXModels("cnn_mnist", pb, default_l1, default_lInf, true);
// MobileNet v2; compared after an appended Softmax (5th arg = true).
1092 TEST_P(Test_ONNX_nets, MobileNet_v2)
1094 // output range: [-166; 317], after Softmax [0; 1]
1095 testONNXModels("mobilenetv2", pb, default_l1, default_lInf, true);
// LResNet100E-IR face-recognition model with per-backend tolerance tweaks.
1098 TEST_P(Test_ONNX_nets, LResNet100E_IR)
// Memory/debug tags (arguments of an applyTestTag(...) call whose opening line
// is elided in this excerpt): 2GB on 32-bit OpenCL builds, else 512MB/1GB.
1101 #if defined(OPENCV_32BIT_CONFIGURATION) && defined(HAVE_OPENCL)
1102 CV_TEST_TAG_MEMORY_2GB,
1104 (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
1106 CV_TEST_TAG_DEBUG_LONG
// Skip all non-CPU targets on both IE backends (NN Builder and nGraph).
1108 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
1110 if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1111 if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1112 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1114 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH)
1116 if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
1117 if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
1118 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
1121 double l1 = default_l1, lInf = default_lInf;
1122 // output range: [-3; 3]
// NOTE(review): the l1/lInf assignments for the three branches below are elided
// in this excerpt — confirm against the full file.
1123 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
1128 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_CPU)
1133 else if (target == DNN_TARGET_CUDA_FP16)
1138 testONNXModels("LResNet100E_IR", pb, l1, lInf);
// FER+ emotion-recognition model with per-backend tolerance tweaks.
1141 TEST_P(Test_ONNX_nets, Emotion_ferplus)
// Known failure on Myriad X for either IE backend — skip with the matching tag.
1143 #if defined(INF_ENGINE_RELEASE)
1144 if (target == DNN_TARGET_MYRIAD && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
1145 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X,
1146 backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ?
1147 CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER :
1148 CV_TEST_TAG_DNN_SKIP_IE_NGRAPH);
1151 double l1 = default_l1;
1152 double lInf = default_lInf;
1154 // Output values are in range [-2.011, 2.111]
// NOTE(review): the l1/lInf assignments for the branches below are elided in
// this excerpt — confirm against the full file.
1155 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
1157 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_OPENCL_FP16)
1162 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && (target == DNN_TARGET_CPU || target == DNN_TARGET_OPENCL)) {
// IE 2020.4 nGraph + OpenCL_FP16 needs relaxed tolerances.
1166 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2020040000)
1167 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target == DNN_TARGET_OPENCL_FP16)
1169 l1 = 0.012f; lInf = 0.035f;
1173 testONNXModels("emotion_ferplus", pb, l1, lInf);
// Inception v2; compared after an appended Softmax (5th arg = true).
1176 TEST_P(Test_ONNX_nets, Inception_v2)
1178 testONNXModels("inception_v2", pb, default_l1, default_lInf, true);
// DenseNet-121; compared after an appended Softmax (5th arg = true).
1181 TEST_P(Test_ONNX_nets, DenseNet121)
1183 applyTestTag(CV_TEST_TAG_MEMORY_512MB);
1185 // output range: [-87; 138], after Softmax [0; 1]
// Last arg: skip the no-fallback check on Myriad targets.
1186 testONNXModels("densenet121", pb, default_l1, default_lInf, true, target != DNN_TARGET_MYRIAD);
// Inception v1 (GoogLeNet); skipped on Myriad for both IE backends.
1189 TEST_P(Test_ONNX_nets, Inception_v1)
1191 #if defined(INF_ENGINE_RELEASE)
1192 if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 ||
1193 backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH) && target == DNN_TARGET_MYRIAD)
1194 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
1196 testONNXModels("inception_v1", pb);
// ShuffleNet; all non-CPU targets are skipped on the IE NN Builder backend.
1199 TEST_P(Test_ONNX_nets, Shufflenet)
1201 if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019)
1203 if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1204 if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1205 if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
1207 testONNXModels("shufflenet", pb);
// 3D ResNet-34 trained on Kinetics (video action recognition).
// Builds two 16-frame clips from single images, permutes NCHW blobs into the
// model's expected layout via an auxiliary Permute net, reshapes to 5-D
// (1x3x16x112x112), and checks both forward passes against .npy references.
1210 TEST_P(Test_ONNX_nets, Resnet34_kinetics)
1212 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LT(2019010000)
1213 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_VERSION);
// Only the CPU target is supported; every other backend/target combo is skipped.
// NOTE(review): the CUDA-branch body and several brace lines are elided here.
1215 if (backend == DNN_BACKEND_CUDA)
1219 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU)
1220 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER); // Only CPU on DLIE backend is supported
1221 else if (backend == DNN_BACKEND_INFERENCE_ENGINE_NGRAPH && target != DNN_TARGET_CPU)
1222 applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_NGRAPH); // Only CPU on DLIE backend is supported
1223 else if (target != DNN_TARGET_CPU)
1224 throw SkipTestException("Only CPU is supported");
// Model is optional test data (required=false); inputs are two still images.
1226 String onnxmodel = findDataFile("dnn/resnet-34_kinetics.onnx", false);
1227 Mat image0 = imread(findDataFile("dnn/dog416.png"));
1228 Mat image1 = imread(findDataFile("dnn/street.png"));
1230 Mat ref0 = blobFromNPY(_tf("data/output_kinetics0.npy"));
1231 Mat ref1 = blobFromNPY(_tf("data/output_kinetics1.npy"));
// Replicate each image 16 times to form a fixed-length frame sequence.
1233 std::vector<Mat> images_0(16, image0);
1234 std::vector<Mat> images_1(16, image1);
1235 Mat blob0 = blobFromImages(images_0, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
1236 Mat blob1 = blobFromImages(images_1, 1.0, Size(112, 112), Scalar(114.7748, 107.7354, 99.4750), true, true);
// Auxiliary single-layer net that swaps the first two axes (order 1,0,2,3).
// NOTE(review): the `Net permute;` and `LayerParams lp;` declarations are
// elided in this excerpt — `lp` and `permute` are used below.
1240 int order[] = {1, 0, 2, 3};
1241 lp.set("order", DictValue::arrayInt<int*>(&order[0], 4));
1242 permute.addLayerToPrev("perm", "Permute", lp);
1244 permute.setPreferableBackend(backend);
1245 permute.setPreferableTarget(target);
// clone() detaches each result from the net's internal buffers before reuse.
1247 permute.setInput(blob0);
1248 Mat input0 = permute.forward().clone();
1250 permute.setInput(blob1);
1251 Mat input1 = permute.forward().clone();
// Reshape to the 5-D input the 3D CNN expects: N=1, C=3, T=16, H=W=112.
1253 int dims[] = {1, 3, 16, 112, 112};
1254 input0 = input0.reshape(0, 5, &dims[0]);
1255 input1 = input1.reshape(0, 5, &dims[0]);
1257 Net net = readNetFromONNX(onnxmodel);
1258 ASSERT_FALSE(net.empty());
1259 net.setPreferableBackend(backend);
1260 net.setPreferableTarget(target);
1262 // output range [-5, 11]
// NOTE(review): the CUDA_FP16 tolerance override body is elided in this excerpt.
1263 float l1 = 0.0013, lInf = 0.009;
1264 if (target == DNN_TARGET_CUDA_FP16)
// Check both clips against their reference outputs.
1270 checkBackend(&input0, &ref0);
1271 net.setInput(input0);
1272 Mat out = net.forward().clone();
1273 normAssert(ref0, out, "", l1, lInf);
1275 checkBackend(&input1, &ref1);
1276 net.setInput(input1);
1277 out = net.forward().clone();
1278 normAssert(ref1, out, "", l1, lInf);
1280 expectNoFallbacksFromIE(net);
// Instantiate the parameterized Test_ONNX_nets suite over every available
// DNN backend/target pair.
1283 INSTANTIATE_TEST_CASE_P(/**/, Test_ONNX_nets, dnnBackendsAndTargets());