// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.

// Copyright (C) 2017, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.
#include "test_precomp.hpp"

#include <opencv2/core/ocl.hpp>
#include <opencv2/core/opencl/ocl_defs.hpp>
#include <opencv2/dnn/layer.details.hpp>  // CV_DNN_REGISTER_LAYER_CLASS
13 namespace opencv_test { namespace {
15 TEST(blobFromImage_4ch, Regression)
18 for(int i = 0; i < 4; i++)
19 ch[i] = Mat::ones(10, 10, CV_8U)*i;
23 Mat blob = dnn::blobFromImage(img, 1., Size(), Scalar(), false, false);
25 for(int i = 0; i < 4; i++)
27 ch[i] = Mat(img.rows, img.cols, CV_32F, blob.ptr(0, i));
28 ASSERT_DOUBLE_EQ(cvtest::norm(ch[i], cv::NORM_INF), i);
32 TEST(blobFromImage, allocated)
34 int size[] = {1, 3, 4, 5};
35 Mat img(size[2], size[3], CV_32FC(size[1]));
36 Mat blob(4, size, CV_32F);
37 void* blobData = blob.data;
38 dnn::blobFromImage(img, blob, 1.0 / 255, Size(), Scalar(), false, false);
39 ASSERT_EQ(blobData, blob.data);
42 TEST(imagesFromBlob, Regression)
46 std::vector<cv::Mat> inputImgs(nbOfImages);
47 for (int i = 0; i < nbOfImages; i++)
49 inputImgs[i] = cv::Mat::ones(100, 100, CV_32FC3);
50 cv::randu(inputImgs[i], cv::Scalar::all(0), cv::Scalar::all(1));
53 cv::Mat blob = cv::dnn::blobFromImages(inputImgs, 1., cv::Size(), cv::Scalar(), false, false);
54 std::vector<cv::Mat> outputImgs;
55 cv::dnn::imagesFromBlob(blob, outputImgs);
57 for (int i = 0; i < nbOfImages; i++)
59 ASSERT_EQ(cv::countNonZero(inputImgs[i] != outputImgs[i]), 0);
63 TEST(readNet, Regression)
65 Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt"),
66 findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
67 EXPECT_FALSE(net.empty());
68 net = readNet(findDataFile("dnn/opencv_face_detector.caffemodel", false),
69 findDataFile("dnn/opencv_face_detector.prototxt"));
70 EXPECT_FALSE(net.empty());
71 net = readNet(findDataFile("dnn/openface_nn4.small2.v1.t7", false));
72 EXPECT_FALSE(net.empty());
73 net = readNet(findDataFile("dnn/tiny-yolo-voc.cfg"),
74 findDataFile("dnn/tiny-yolo-voc.weights", false));
75 EXPECT_FALSE(net.empty());
76 net = readNet(findDataFile("dnn/ssd_mobilenet_v1_coco.pbtxt"),
77 findDataFile("dnn/ssd_mobilenet_v1_coco.pb", false));
78 EXPECT_FALSE(net.empty());
81 typedef testing::TestWithParam<tuple<Backend, Target> > dump;
82 TEST_P(dump, Regression)
84 const int backend = get<0>(GetParam());
85 const int target = get<1>(GetParam());
86 Net net = readNet(findDataFile("dnn/squeezenet_v1.1.prototxt"),
87 findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
89 int size[] = {1, 3, 227, 227};
90 Mat input = cv::Mat::ones(4, size, CV_32F);
92 net.setPreferableBackend(backend);
93 net.setPreferableTarget(target);
94 EXPECT_FALSE(net.dump().empty());
96 EXPECT_FALSE(net.dump().empty());
99 INSTANTIATE_TEST_CASE_P(/**/, dump, dnnBackendsAndTargets());
101 class FirstCustomLayer CV_FINAL : public Layer
104 FirstCustomLayer(const LayerParams ¶ms) : Layer(params) {}
106 static Ptr<Layer> create(LayerParams& params)
108 return Ptr<Layer>(new FirstCustomLayer(params));
111 void forward(InputArrayOfArrays, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
114 CV_TRACE_ARG_VALUE(name, "name", name.c_str());
116 std::vector<Mat> outputs;
117 outputs_arr.getMatVector(outputs);
122 class SecondCustomLayer CV_FINAL : public Layer
125 SecondCustomLayer(const LayerParams ¶ms) : Layer(params) {}
127 static Ptr<Layer> create(LayerParams& params)
129 return Ptr<Layer>(new SecondCustomLayer(params));
132 void forward(InputArrayOfArrays, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays) CV_OVERRIDE
135 CV_TRACE_ARG_VALUE(name, "name", name.c_str());
137 std::vector<Mat> outputs;
138 outputs_arr.getMatVector(outputs);
143 TEST(LayerFactory, custom_layers)
147 lp.type = "CustomType";
149 Mat inp(1, 1, CV_32FC1);
150 for (int i = 0; i < 3; ++i)
152 if (i == 0) { CV_DNN_REGISTER_LAYER_CLASS(CustomType, FirstCustomLayer); }
153 else if (i == 1) { CV_DNN_REGISTER_LAYER_CLASS(CustomType, SecondCustomLayer); }
154 else if (i == 2) { LayerFactory::unregisterLayer("CustomType"); }
157 net.addLayerToPrev(lp.name, lp.type, lp);
160 net.setPreferableBackend(DNN_BACKEND_OPENCV);
161 Mat output = net.forward();
163 if (i == 0) { EXPECT_EQ(output.at<float>(0), 1); }
164 else if (i == 1) { EXPECT_EQ(output.at<float>(0), 2); }
165 else if (i == 2) { EXPECT_EQ(output.at<float>(0), 1); }
167 LayerFactory::unregisterLayer("CustomType");
170 typedef testing::TestWithParam<tuple<float, Vec3f, int, tuple<Backend, Target> > > setInput;
171 TEST_P(setInput, normalization)
173 const float kScale = get<0>(GetParam());
174 const Scalar kMean = get<1>(GetParam());
175 const int dtype = get<2>(GetParam());
176 const int backend = get<0>(get<3>(GetParam()));
177 const int target = get<1>(get<3>(GetParam()));
178 const bool kSwapRB = true;
180 if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16 && dtype != CV_32F)
181 applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
182 if (backend == DNN_BACKEND_VKCOM && dtype != CV_32F)
183 throw SkipTestException(CV_TEST_TAG_DNN_SKIP_VULKAN);
185 Mat inp(5, 5, CV_8UC3);
187 Mat ref = blobFromImage(inp, kScale, Size(), kMean, kSwapRB, /*crop*/false);
191 net.addLayerToPrev("testLayer", "Identity", lp);
192 net.setPreferableBackend(backend);
193 net.setPreferableTarget(target);
195 Mat blob = blobFromImage(inp, 1.0, Size(), Scalar(), kSwapRB, /*crop*/false, dtype);
196 ASSERT_EQ(blob.type(), dtype);
197 net.setInput(blob, "", kScale, kMean);
198 Mat out = net.forward();
199 ASSERT_EQ(out.type(), CV_32F);
200 normAssert(ref, out, "", 4e-4, 1e-3);
203 INSTANTIATE_TEST_CASE_P(/**/, setInput, Combine(
204 Values(1.0f, 1.0 / 127.5),
205 Values(Vec3f(), Vec3f(50, 50, 50), Vec3f(10, 50, 140)),
206 Values(CV_32F, CV_8U),
207 dnnBackendsAndTargets()
210 class CustomLayerWithDeprecatedForward CV_FINAL : public Layer
213 CustomLayerWithDeprecatedForward(const LayerParams ¶ms) : Layer(params) {}
215 static Ptr<Layer> create(LayerParams& params)
217 return Ptr<Layer>(new CustomLayerWithDeprecatedForward(params));
220 virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE
222 CV_Assert_N(inputs[0]->depth() == CV_32F, outputs[0].depth() == CV_32F);
223 cv::add(*inputs[0], 0.5f, outputs[0]);
227 class CustomLayerWithDeprecatedForwardAndFallback CV_FINAL : public Layer
230 CustomLayerWithDeprecatedForwardAndFallback(const LayerParams ¶ms) : Layer(params) {}
232 static Ptr<Layer> create(LayerParams& params)
234 return Ptr<Layer>(new CustomLayerWithDeprecatedForwardAndFallback(params));
237 void forward(InputArrayOfArrays inputs, OutputArrayOfArrays outputs, OutputArrayOfArrays internals) CV_OVERRIDE
240 CV_TRACE_ARG_VALUE(name, "name", name.c_str());
242 CV_OCL_RUN(preferableTarget == DNN_TARGET_OPENCL || preferableTarget == DNN_TARGET_OPENCL_FP16,
243 forward_ocl(inputs, outputs, internals));
245 Layer::forward_fallback(inputs, outputs, internals);
248 virtual void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals) CV_OVERRIDE
250 CV_Assert_N(inputs[0]->depth() == CV_32F, outputs[0].depth() == CV_32F);
251 cv::add(*inputs[0], 0.5f, outputs[0]);
255 bool forward_ocl(InputArrayOfArrays inputs_arr, OutputArrayOfArrays outputs_arr, OutputArrayOfArrays internals_arr)
257 if (inputs_arr.depth() != CV_32F)
260 std::vector<UMat> inputs;
261 std::vector<UMat> outputs;
262 inputs_arr.getUMatVector(inputs);
263 outputs_arr.getUMatVector(outputs);
264 cv::add(inputs[0], 0.5f, outputs[0]);
270 typedef testing::TestWithParam<tuple<Backend, Target> > DeprecatedForward;
271 TEST_P(DeprecatedForward, CustomLayer)
273 const int backend = get<0>(GetParam());
274 const int target = get<1>(GetParam());
276 Mat inp(5, 5, CV_32FC1);
277 randu(inp, -1.0f, 1.0f);
278 inp = blobFromImage(inp);
280 CV_DNN_REGISTER_LAYER_CLASS(CustomType, CustomLayerWithDeprecatedForward);
285 net.addLayerToPrev("testLayer", "CustomType", lp);
286 net.setPreferableBackend(backend);
287 net.setPreferableTarget(target);
289 Mat out = net.forward();
290 normAssert(out, inp + 0.5f, "", 2e-4, 7e-4);
294 LayerFactory::unregisterLayer("CustomType");
297 LayerFactory::unregisterLayer("CustomType");
300 TEST_P(DeprecatedForward, CustomLayerWithFallback)
302 const int backend = get<0>(GetParam());
303 const int target = get<1>(GetParam());
305 Mat inp(5, 5, CV_32FC1);
306 randu(inp, -1.0f, 1.0f);
307 inp = blobFromImage(inp);
309 CV_DNN_REGISTER_LAYER_CLASS(CustomType, CustomLayerWithDeprecatedForwardAndFallback);
314 net.addLayerToPrev("testLayer", "CustomType", lp);
315 net.setPreferableBackend(backend);
316 net.setPreferableTarget(target);
318 Mat out = net.forward();
319 normAssert(out, inp + 0.5f, "", 2e-4, 7e-4);
323 LayerFactory::unregisterLayer("CustomType");
326 LayerFactory::unregisterLayer("CustomType");
329 INSTANTIATE_TEST_CASE_P(/**/, DeprecatedForward, dnnBackendsAndTargets());
331 TEST(Net, forwardAndRetrieve)
333 std::string prototxt =
336 " name: \"testLayer\"\n"
338 " bottom: \"data\"\n"
339 " top: \"firstCopy\"\n"
340 " top: \"secondCopy\"\n"
346 Net net = readNetFromCaffe(&prototxt[0], prototxt.size());
347 net.setPreferableBackend(DNN_BACKEND_OPENCV);
349 Mat inp(4, 5, CV_32F);
353 std::vector<String> outNames;
354 outNames.push_back("testLayer");
355 std::vector<std::vector<Mat> > outBlobs;
357 net.forward(outBlobs, outNames);
359 EXPECT_EQ(outBlobs.size(), 1);
360 EXPECT_EQ(outBlobs[0].size(), 2);
361 normAssert(outBlobs[0][0], inp.rowRange(0, 2), "first part");
362 normAssert(outBlobs[0][1], inp.rowRange(2, 4), "second part");
365 #ifdef HAVE_INF_ENGINE
366 static const std::chrono::milliseconds async_timeout(10000);
368 // This test runs network in synchronous mode for different inputs and then
369 // runs the same model asynchronously for the same inputs.
370 typedef testing::TestWithParam<tuple<int, Target> > Async;
371 TEST_P(Async, set_and_forward_single)
373 const int dtype = get<0>(GetParam());
374 const int target = get<1>(GetParam());
376 const std::string suffix = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? "_fp16" : "";
377 const std::string& model = findDataFile("dnn/layers/layer_convolution" + suffix + ".bin");
378 const std::string& proto = findDataFile("dnn/layers/layer_convolution" + suffix + ".xml");
380 Net netSync = readNet(model, proto);
381 netSync.setPreferableTarget(target);
383 Net netAsync = readNet(model, proto);
384 netAsync.setPreferableTarget(target);
387 const int numInputs = 10;
388 std::vector<Mat> inputs(numInputs);
389 int blobSize[] = {2, 6, 75, 113};
390 for (int i = 0; i < numInputs; ++i)
392 inputs[i].create(4, &blobSize[0], dtype);
393 randu(inputs[i], 0, 255);
396 // Run synchronously.
397 std::vector<Mat> refs(numInputs);
398 for (int i = 0; i < numInputs; ++i)
400 netSync.setInput(inputs[i]);
401 refs[i] = netSync.forward().clone();
404 // Run asynchronously. To make test more robust, process inputs in the reversed order.
405 for (int i = numInputs - 1; i >= 0; --i)
407 netAsync.setInput(inputs[i]);
409 AsyncArray out = netAsync.forwardAsync();
410 ASSERT_TRUE(out.valid());
412 EXPECT_TRUE(out.get(result, async_timeout));
413 normAssert(refs[i], result, format("Index: %d", i).c_str(), 0, 0);
417 TEST_P(Async, set_and_forward_all)
419 const int dtype = get<0>(GetParam());
420 const int target = get<1>(GetParam());
422 const std::string suffix = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? "_fp16" : "";
423 const std::string& model = findDataFile("dnn/layers/layer_convolution" + suffix + ".bin");
424 const std::string& proto = findDataFile("dnn/layers/layer_convolution" + suffix + ".xml");
427 Net netSync = readNet(model, proto);
428 netSync.setPreferableTarget(target);
430 Net netAsync = readNet(model, proto);
431 netAsync.setPreferableTarget(target);
434 const int numInputs = 10;
435 std::vector<Mat> inputs(numInputs);
436 int blobSize[] = {2, 6, 75, 113};
437 for (int i = 0; i < numInputs; ++i)
439 inputs[i].create(4, &blobSize[0], dtype);
440 randu(inputs[i], 0, 255);
443 // Run synchronously.
444 std::vector<Mat> refs(numInputs);
445 for (int i = 0; i < numInputs; ++i)
447 netSync.setInput(inputs[i]);
448 refs[i] = netSync.forward().clone();
451 // Run asynchronously. To make test more robust, process inputs in the reversed order.
452 std::vector<AsyncArray> outs(numInputs);
453 for (int i = numInputs - 1; i >= 0; --i)
455 netAsync.setInput(inputs[i]);
456 outs[i] = netAsync.forwardAsync();
459 for (int i = numInputs - 1; i >= 0; --i)
461 ASSERT_TRUE(outs[i].valid());
463 EXPECT_TRUE(outs[i].get(result, async_timeout));
464 normAssert(refs[i], result, format("Index: %d", i).c_str(), 0, 0);
468 INSTANTIATE_TEST_CASE_P(/**/, Async, Combine(
469 Values(CV_32F, CV_8U),
470 testing::ValuesIn(getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE))
473 typedef testing::TestWithParam<Target> Test_Model_Optimizer;
474 TEST_P(Test_Model_Optimizer, forward_two_nets)
476 const int target = GetParam();
478 const std::string suffix = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? "_fp16" : "";
479 const std::string& model = findDataFile("dnn/layers/layer_convolution" + suffix + ".bin");
480 const std::string& proto = findDataFile("dnn/layers/layer_convolution" + suffix + ".xml");
482 Net net0 = readNet(model, proto);
483 net0.setPreferableTarget(target);
485 Net net1 = readNet(model, proto);
486 net1.setPreferableTarget(target);
489 int blobSize[] = {2, 6, 75, 113};
490 Mat input(4, &blobSize[0], CV_32F);
491 randu(input, 0, 255);
493 net0.setInput(input);
494 Mat ref0 = net0.forward().clone();
496 net1.setInput(input);
497 Mat ref1 = net1.forward();
499 net0.setInput(input);
500 Mat ref2 = net0.forward();
502 normAssert(ref0, ref2, 0, 0);
504 INSTANTIATE_TEST_CASE_P(/**/, Test_Model_Optimizer,
505 testing::ValuesIn(getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE))
508 #endif // HAVE_INF_ENGINE