// Static configuration shared by every performance test; values are filled in
// by TestBase::Init() from the command line.
9 int64 TestBase::timeLimitDefault = 0;
// (unsigned int)(-1) == UINT_MAX: iterations are unlimited unless overridden.
10 unsigned int TestBase::iterationsLimitDefault = (unsigned int)(-1);
// Timer-overhead compensation measured once by TestBase::_calibrate().
11 int64 TestBase::_timeadjustment = 0;
// Command-line option table in cv::CommandLineParser syntax:
// "{short |long-name |default |help text}".
13 const std::string command_line_keys =
14 "{ |perf_max_outliers |8 |percent of allowed outliers}"
15 "{ |perf_min_samples |10 |minimal required numer of samples}"
16 "{ |perf_force_samples |100 |force set maximum number of samples for all tests}"
17 "{ |perf_seed |809564 |seed for random numbers generator}"
18 "{ |perf_threads |-1 |the number of worker threads, if parallel execution is enabled}"
19 "{ |perf_write_sanity |false |create new records for sanity checks}"
20 "{ |perf_verify_sanity |false |fail tests having no regression data for sanity checks}"
// NOTE(review): perf_time_limit appears twice (6.0 here, 3.0 below). The gaps in
// the surrounding line numbering suggest these are wrapped in elided preprocessor
// conditionals (e.g. #ifdef ANDROID / #else) — confirm against the full file.
22 "{ |perf_time_limit |6.0 |default time limit for a single test (in seconds)}"
23 "{ |perf_affinity_mask |0 |set affinity mask for the main thread}"
24 "{ |perf_log_power_checkpoints | |additional xml logging for power measurement}"
26 "{ |perf_time_limit |3.0 |default time limit for a single test (in seconds)}"
28 "{ |perf_max_deviation |1.0 |}"
29 "{h |help |false |print help info}"
// CUDA/GPU-related options; presumably guarded by an elided #ifdef HAVE_CUDA.
31 "{ |perf_run_cpu |false |run GPU performance tests for analogical CPU functions}"
32 "{ |perf_cuda_device |0 |run GPU test suite onto specific CUDA capable device}"
33 "{ |perf_cuda_info_only |false |print an information about system and an available CUDA devices and then exit.}"
// File-local storage for the parsed command-line options above; populated by
// TestBase::Init() and read throughout the framework.
37 static double param_max_outliers;
38 static double param_max_deviation;
39 static unsigned int param_min_samples;
40 static unsigned int param_force_samples;
41 static uint64 param_seed;
42 static double param_time_limit;
43 static int param_threads;
44 static bool param_write_sanity;
45 static bool param_verify_sanity;
// GPU-suite options — presumably inside an elided #ifdef HAVE_CUDA block.
47 static bool param_run_cpu;
48 static int param_cuda_device;
// Android-only power/affinity options — presumably inside an elided #ifdef ANDROID block.
53 static int param_affinity_mask;
54 static bool log_power_checkpoints;
// Linux/Android-specific thread-affinity support (raw syscall, no pthread dep).
56 #include <sys/syscall.h>
// Pins the calling thread to the CPUs selected by `mask` via
// sched_setaffinity. On failure logs errno; never throws.
// NOTE(review): pid/err declarations live on elided lines — incomplete view.
58 static void setCurrentThreadAffinityMask(int mask)
61 int syscallres=syscall(__NR_sched_setaffinity, pid, sizeof(mask), &mask);
65 err=err;//to avoid warnings about unused variables
66 LOGE("Error in the syscall setaffinity: mask=%d=0x%x err=%d=0x%x", mask, mask, err, err);
// gpumat header — presumably under an elided #ifdef HAVE_CUDA.
72 # include <opencv2/core/gpumat.hpp>
// Global gtest environment: resets OpenCV's thread count to the default
// before the test suite runs.
77 class PerfEnvironment: public ::testing::Environment
82 cv::setNumThreads(-1);
// Fills `m` with uniform random values appropriate for its depth:
// integer mats get full-range bytes, float/double mats get values in
// [-bigValue, bigValue] to keep arithmetic in tests overflow-free.
88 static void randu(cv::Mat& m)
90 const int bigValue = 0x00000FFF;
91 if (m.depth() < CV_32F)
93 int minmax[] = {0, 256};
// Reinterpret the data as a flat CV_8U view so each byte is randomized.
94 cv::Mat mr = cv::Mat(m.rows, (int)(m.cols * m.elemSize()), CV_8U, m.ptr(), m.step[0]);
95 cv::randu(mr, cv::Mat(1, 1, CV_32S, minmax), cv::Mat(1, 1, CV_32S, minmax + 1));
97 else if (m.depth() == CV_32F)
99 //float minmax[] = {-FLT_MAX, FLT_MAX};
100 float minmax[] = {-bigValue, bigValue};
101 cv::Mat mr = m.reshape(1);
102 cv::randu(mr, cv::Mat(1, 1, CV_32F, minmax), cv::Mat(1, 1, CV_32F, minmax + 1));
// else: CV_64F path (the `else` line itself is elided in this view).
106 //double minmax[] = {-DBL_MAX, DBL_MAX};
107 double minmax[] = {-bigValue, bigValue};
108 cv::Mat mr = m.reshape(1);
109 cv::randu(mr, cv::Mat(1, 1, CV_64F, minmax), cv::Mat(1, 1, CV_64F, minmax + 1));
113 /*****************************************************************************************\
114 * inner exception class for early termination
115 \*****************************************************************************************/
// Thrown to abort a perf test early (e.g. missing test data) without being
// reported as an unhandled exception; caught in TestBase::RunPerfTestBody().
117 class PerfEarlyExitException: public cv::Exception {};
119 /*****************************************************************************************\
121 \*****************************************************************************************/
// Meyers singleton: one Regression storage shared by the whole test binary.
123 Regression& Regression::instance()
125 static Regression single;
// Records (or verifies) one named array for the given test and marks the test
// as having at least one sanity check (see TestBase::TearDown()).
129 Regression& Regression::add(TestBase* test, const std::string& name, cv::InputArray array, double eps, ERROR_TYPE err)
131 if(test) test->verified = true;
132 return instance()(name, array, eps, err);
135 Regression& Regression::addKeypoints(TestBase* test, const std::string& name, const std::vector<cv::KeyPoint>& array, double eps, ERROR_TYPE err)
137 int len = (int)array.size();
138 cv::Mat pt (len, 1, CV_32FC2, (void*)&array[0].pt, sizeof(cv::KeyPoint));
139 cv::Mat size (len, 1, CV_32FC1, (void*)&array[0].size, sizeof(cv::KeyPoint));
140 cv::Mat angle (len, 1, CV_32FC1, (void*)&array[0].angle, sizeof(cv::KeyPoint));
141 cv::Mat response(len, 1, CV_32FC1, (void*)&array[0].response, sizeof(cv::KeyPoint));
142 cv::Mat octave (len, 1, CV_32SC1, (void*)&array[0].octave, sizeof(cv::KeyPoint));
143 cv::Mat class_id(len, 1, CV_32SC1, (void*)&array[0].class_id, sizeof(cv::KeyPoint));
145 return Regression::add(test, name + "-pt", pt, eps, ERROR_ABSOLUTE)
146 (name + "-size", size, eps, ERROR_ABSOLUTE)
147 (name + "-angle", angle, eps, ERROR_ABSOLUTE)
148 (name + "-response", response, eps, err)
149 (name + "-octave", octave, eps, ERROR_ABSOLUTE)
150 (name + "-class_id", class_id, eps, ERROR_ABSOLUTE);
153 Regression& Regression::addMatches(TestBase* test, const std::string& name, const std::vector<cv::DMatch>& array, double eps, ERROR_TYPE err)
155 int len = (int)array.size();
156 cv::Mat queryIdx(len, 1, CV_32SC1, (void*)&array[0].queryIdx, sizeof(cv::DMatch));
157 cv::Mat trainIdx(len, 1, CV_32SC1, (void*)&array[0].trainIdx, sizeof(cv::DMatch));
158 cv::Mat imgIdx (len, 1, CV_32SC1, (void*)&array[0].imgIdx, sizeof(cv::DMatch));
159 cv::Mat distance(len, 1, CV_32FC1, (void*)&array[0].distance, sizeof(cv::DMatch));
161 return Regression::add(test, name + "-queryIdx", queryIdx, DBL_EPSILON, ERROR_ABSOLUTE)
162 (name + "-trainIdx", trainIdx, DBL_EPSILON, ERROR_ABSOLUTE)
163 (name + "-imgIdx", imgIdx, DBL_EPSILON, ERROR_ABSOLUTE)
164 (name + "-distance", distance, eps, err);
// Public entry point: forwards to the singleton's init().
167 void Regression::Init(const std::string& testSuitName, const std::string& ext)
169 instance().init(testSuitName, ext);
// One-time setup of the sanity-data storage paths. The input file is looked
// up under $OPENCV_TEST_DATA_PATH when set, otherwise the current directory.
172 void Regression::init(const std::string& testSuitName, const std::string& ext)
174 if (!storageInPath.empty())
176 LOGE("Subsequent initialisation of Regression utility is not allowed.");
180 const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
181 const char *path_separator = "/";
// Normalize the env path: empty -> ".", and append a separator only when the
// last character is not already a slash or backslash.
185 int len = (int)strlen(data_path_dir)-1;
186 if (len < 0) len = 0;
187 std::string path_base = (data_path_dir[0] == 0 ? std::string(".") : std::string(data_path_dir))
188 + (data_path_dir[len] == '/' || data_path_dir[len] == '\\' ? "" : path_separator)
192 storageInPath = path_base + testSuitName + ext;
193 storageOutPath = path_base + testSuitName;
// else-branch (env var unset): use plain relative paths.
197 storageInPath = testSuitName + ext;
198 storageOutPath = testSuitName;
201 suiteName = testSuitName;
205 if (storageIn.open(storageInPath, cv::FileStorage::READ))
207 rootIn = storageIn.root();
// .gz input cannot be appended to, so new records go to a "<name>_new" file.
208 if (storageInPath.length() > 3 && storageInPath.substr(storageInPath.length()-3) == ".gz")
209 storageOutPath += "_new";
210 storageOutPath += ext;
213 catch(cv::Exception&)
215 LOGE("Failed to open sanity data for reading: %s", storageInPath.c_str());
// No readable input: write records directly to the input path.
218 if(!storageIn.isOpened())
219 storageOutPath = storageInPath;
// Seed the sampling RNG from the tick counter so the recorded probe points
// differ between runs (this rng SHOULD be non-deterministic).
222 Regression::Regression() : regRNG(cv::getTickCount())//this rng should be really random
// Destructor: closes the current node (if open) and releases storages.
226 Regression::~Regression()
228 if (storageIn.isOpened())
230 if (storageOut.isOpened())
232 if (!currentTestNodeName.empty())
234 storageOut.release();
// Lazily opens the output FileStorage. Appends when writing into the same
// file that was read; otherwise truncates (WRITE). On failure the path is
// cleared so subsequent calls do not retry.
238 cv::FileStorage& Regression::write()
240 if (!storageOut.isOpened() && !storageOutPath.empty())
242 int mode = (storageIn.isOpened() && storageInPath == storageOutPath)
243 ? cv::FileStorage::APPEND : cv::FileStorage::WRITE;
244 storageOut.open(storageOutPath, mode);
245 if (!storageOut.isOpened())
247 LOGE("Could not open \"%s\" file for writing", storageOutPath.c_str());
248 storageOutPath.clear();
250 else if (mode == cv::FileStorage::WRITE && !rootIn.empty())
252 //TODO: write content of rootIn node into the storageOut
// Builds a FileStorage-safe node name from the current gtest test name:
// "<case>--<name>[--<type>][--<value>]", with non-alphanumerics replaced.
258 std::string Regression::getCurrentTestNodeName()
260 const ::testing::TestInfo* const test_info =
261 ::testing::UnitTest::GetInstance()->current_test_info();
266 std::string nodename = std::string(test_info->test_case_name()) + "--" + test_info->name();
// Parameterized tests embed the index after '/'; strip it (handling elided).
267 size_t idx = nodename.find_first_of('/');
268 if (idx != std::string::npos)
271 const char* type_param = test_info->type_param();
273 (nodename += "--") += type_param;
275 const char* value_param = test_info->value_param();
276 if (value_param != 0)
277 (nodename += "--") += value_param;
// Sanitize: only [A-Za-z0-9_] survive (replacement statement is elided).
279 for(size_t i = 0; i < nodename.length(); ++i)
280 if (!isalnum(nodename[i]) && '_' != nodename[i])
// True when the InputArray wraps a vector of Mats (i.e. must be checked
// element-by-element rather than as a single matrix).
286 bool Regression::isVector(cv::InputArray a)
288 return a.kind() == cv::_InputArray::STD_VECTOR_MAT || a.kind() == cv::_InputArray::STD_VECTOR_VECTOR;
// Reads one scalar element (row y, col x, channel cn) of any depth as double.
// The switch-on-depth line itself is elided in this view.
291 double Regression::getElem(cv::Mat& m, int y, int x, int cn)
295 case CV_8U: return *(m.ptr<unsigned char>(y, x) + cn);
296 case CV_8S: return *(m.ptr<signed char>(y, x) + cn);
297 case CV_16U: return *(m.ptr<unsigned short>(y, x) + cn);
298 case CV_16S: return *(m.ptr<signed short>(y, x) + cn);
299 case CV_32S: return *(m.ptr<signed int>(y, x) + cn);
300 case CV_32F: return *(m.ptr<float>(y, x) + cn);
301 case CV_64F: return *(m.ptr<double>(y, x) + cn);
// Writes the sanity signature of a matrix: min/max, the last element, and
// two pseudo-randomly probed elements (rng1/rng2).
306 void Regression::write(cv::Mat m)
// Only 2D mats are supported; higher-dimensional data is silently skipped.
308 if (!m.empty() && m.dims < 2) return;
311 cv::minMaxIdx(m, &min, &max);
312 write() << "min" << min << "max" << max;
314 write() << "last" << "{" << "x" << m.size.p[1] - 1 << "y" << m.size.p[0] - 1
315 << "val" << getElem(m, m.size.p[0] - 1, m.size.p[1] - 1, m.channels() - 1) << "}";
318 x = regRNG.uniform(0, m.size.p[1]);
319 y = regRNG.uniform(0, m.size.p[0]);
320 cn = regRNG.uniform(0, m.channels());
321 write() << "rng1" << "{" << "x" << x << "y" << y;
// "cn" is only recorded for multi-channel data.
322 if(cn > 0) write() << "cn" << cn;
323 write() << "val" << getElem(m, y, x, cn) << "}";
325 x = regRNG.uniform(0, m.size.p[1]);
326 y = regRNG.uniform(0, m.size.p[0]);
327 cn = regRNG.uniform(0, m.channels());
328 write() << "rng2" << "{" << "x" << x << "y" << y;
329 if (cn > 0) write() << "cn" << cn;
330 write() << "val" << getElem(m, y, x, cn) << "}";
// Converts the configured tolerance into an absolute epsilon for ASSERT_NEAR:
// absolute error uses _eps directly, relative error scales it by the larger
// magnitude of the two values. (The return statements are partially elided.)
333 static double evalEps(double expected, double actual, double _eps, ERROR_TYPE err)
335 if (err == ERROR_ABSOLUTE)
337 else if (err == ERROR_RELATIVE)
338 return std::max(std::abs(expected), std::abs(actual)) * _eps;
// Verifies an actual matrix against its recorded signature: min, max, size,
// last element, and the two recorded random probes. Uses gtest ASSERTs, so
// the calling test fails on mismatch.
342 void Regression::verify(cv::FileNode node, cv::Mat actual, double _eps, std::string argname, ERROR_TYPE err)
344 if (!actual.empty() && actual.dims < 2) return;
346 double actual_min, actual_max;
347 cv::minMaxIdx(actual, &actual_min, &actual_max);
349 double expect_min = (double)node["min"];
350 double eps = evalEps(expect_min, actual_min, _eps, err);
351 ASSERT_NEAR(expect_min, actual_min, eps)
352 << argname << " has unexpected minimal value" << std::endl;
354 double expect_max = (double)node["max"];
355 eps = evalEps(expect_max, actual_max, _eps, err);
356 ASSERT_NEAR(expect_max, actual_max, eps)
357 << argname << " has unexpected maximal value" << std::endl;
359 cv::FileNode last = node["last"];
360 double actual_last = getElem(actual, actual.size.p[0] - 1, actual.size.p[1] - 1, actual.channels() - 1);
// Size is reconstructed from the recorded last-element coordinates.
361 int expect_cols = (int)last["x"] + 1;
362 int expect_rows = (int)last["y"] + 1;
363 ASSERT_EQ(expect_cols, actual.size.p[1])
364 << argname << " has unexpected number of columns" << std::endl;
365 ASSERT_EQ(expect_rows, actual.size.p[0])
366 << argname << " has unexpected number of rows" << std::endl;
368 double expect_last = (double)last["val"];
369 eps = evalEps(expect_last, actual_last, _eps, err);
370 ASSERT_NEAR(expect_last, actual_last, eps)
371 << argname << " has unexpected value of the last element" << std::endl;
373 cv::FileNode rng1 = node["rng1"];
376 int cn1 = rng1["cn"];
378 double expect_rng1 = (double)rng1["val"];
379 // it is safe to use x1 and y1 without checks here because we have already
380 // verified that mat size is the same as recorded
381 double actual_rng1 = getElem(actual, y1, x1, cn1);
383 eps = evalEps(expect_rng1, actual_rng1, _eps, err);
384 ASSERT_NEAR(expect_rng1, actual_rng1, eps)
385 << argname << " has unexpected value of the ["<< x1 << ":" << y1 << ":" << cn1 <<"] element" << std::endl;
387 cv::FileNode rng2 = node["rng2"];
390 int cn2 = rng2["cn"];
392 double expect_rng2 = (double)rng2["val"];
393 double actual_rng2 = getElem(actual, y2, x2, cn2);
395 eps = evalEps(expect_rng2, actual_rng2, _eps, err);
396 ASSERT_NEAR(expect_rng2, actual_rng2, eps)
397 << argname << " has unexpected value of the ["<< x2 << ":" << y2 << ":" << cn2 <<"] element" << std::endl;
// Writes the signature of an arbitrary InputArray. Vectors of Mats record
// only one randomly chosen element; small arrays (< 26 values, i.e. up to
// 5x5) are stored verbatim under "val".
400 void Regression::write(cv::InputArray array)
402 write() << "kind" << array.kind();
403 write() << "type" << array.type();
406 int total = (int)array.total();
407 int idx = regRNG.uniform(0, total);
408 write() << "len" << total;
409 write() << "idx" << idx;
411 cv::Mat m = array.getMat(idx);
413 if (m.total() * m.channels() < 26) //5x5 or smaller
414 write() << "val" << m;
// Non-vector branch (the else/brace lines are elided).
420 if (array.total() * array.channels() < 26) //5x5 or smaller
421 write() << "val" << array.getMat();
423 write(array.getMat());
// Counts elements whose |expected - actual| exceeds the relative tolerance
// eps * max(|expected|, |actual|); optionally reports the worst violation.
427 static int countViolations(const cv::Mat& expected, const cv::Mat& actual, const cv::Mat& diff, double eps, double* max_violation = 0, double* max_allowed = 0)
430 diff.reshape(1).convertTo(diff64f, CV_64F);
432 cv::Mat expected_abs = cv::abs(expected.reshape(1));
433 cv::Mat actual_abs = cv::abs(actual.reshape(1));
434 cv::Mat maximum, mask;
435 cv::max(expected_abs, actual_abs, maximum);
436 cv::multiply(maximum, cv::Vec<double, 1>(eps), maximum, CV_64F);
437 cv::compare(diff64f, maximum, mask, cv::CMP_GT);
439 int v = cv::countNonZero(mask);
441 if (v > 0 && max_violation != 0 && max_allowed != 0)
444 cv::minMaxIdx(maximum, 0, max_allowed, 0, loc, mask);
445 *max_violation = diff64f.at<double>(loc[1], loc[0]);
// Verifies an InputArray against a recorded node: checks kind/type, then
// either compares verbatim stored values ("val") or the statistical
// signature, for vectors of Mats via the single recorded element.
451 void Regression::verify(cv::FileNode node, cv::InputArray array, double eps, ERROR_TYPE err)
453 int expected_kind = (int)node["kind"];
454 int expected_type = (int)node["type"];
455 ASSERT_EQ(expected_kind, array.kind()) << " Argument \"" << node.name() << "\" has unexpected kind";
456 ASSERT_EQ(expected_type, array.type()) << " Argument \"" << node.name() << "\" has unexpected type";
458 cv::FileNode valnode = node["val"];
// Vector branch: compare length, then the one element recorded at "idx".
461 int expected_length = (int)node["len"];
462 ASSERT_EQ(expected_length, (int)array.total()) << " Vector \"" << node.name() << "\" has unexpected length";
463 int idx = node["idx"];
465 cv::Mat actual = array.getMat(idx);
467 if (valnode.isNone())
// No verbatim value stored, so the array must be large enough for the
// statistical signature to be meaningful.
469 ASSERT_LE((size_t)26, actual.total() * (size_t)actual.channels())
470 << " \"" << node.name() << "[" << idx << "]\" has unexpected number of elements";
471 verify(node, actual, eps, cv::format("%s[%d]", node.name().c_str(), idx), err);
// Verbatim comparison path (expected read from valnode; read is elided).
480 ASSERT_TRUE(actual.empty())
481 << " expected empty " << node.name() << "[" << idx<< "]";
485 ASSERT_EQ(expected.size(), actual.size())
486 << " " << node.name() << "[" << idx<< "] has unexpected size";
489 cv::absdiff(expected, actual, diff);
491 if (err == ERROR_ABSOLUTE)
// checkRange with max=eps acts as "all |diff| < eps" test.
493 if (!cv::checkRange(diff, true, 0, 0, eps))
495 if(expected.total() * expected.channels() < 12)
496 std::cout << " Expected: " << std::endl << expected << std::endl << " Actual:" << std::endl << actual << std::endl;
499 cv::minMaxIdx(diff.reshape(1), 0, &max);
501 FAIL() << " Absolute difference (=" << max << ") between argument \""
502 << node.name() << "[" << idx << "]\" and expected value is greater than " << eps;
505 else if (err == ERROR_RELATIVE)
508 int violations = countViolations(expected, actual, diff, eps, &maxv, &maxa);
511 FAIL() << " Relative difference (" << maxv << " of " << maxa << " allowed) between argument \""
512 << node.name() << "[" << idx << "]\" and expected value is greater than " << eps << " in " << violations << " points";
// Scalar (non-vector) branch: same logic applied to the whole array.
520 if (valnode.isNone())
522 ASSERT_LE((size_t)26, array.total() * (size_t)array.channels())
523 << " Argument \"" << node.name() << "\" has unexpected number of elements";
524 verify(node, array.getMat(), eps, "Argument \"" + node.name() + "\"", err);
530 cv::Mat actual = array.getMat();
534 ASSERT_TRUE(actual.empty())
535 << " expected empty " << node.name();
539 ASSERT_EQ(expected.size(), actual.size())
540 << " Argument \"" << node.name() << "\" has unexpected size";
543 cv::absdiff(expected, actual, diff);
545 if (err == ERROR_ABSOLUTE)
547 if (!cv::checkRange(diff, true, 0, 0, eps))
549 if(expected.total() * expected.channels() < 12)
550 std::cout << " Expected: " << std::endl << expected << std::endl << " Actual:" << std::endl << actual << std::endl;
553 cv::minMaxIdx(diff.reshape(1), 0, &max);
555 FAIL() << " Difference (=" << max << ") between argument1 \"" << node.name()
556 << "\" and expected value is greater than " << eps;
559 else if (err == ERROR_RELATIVE)
562 int violations = countViolations(expected, actual, diff, eps, &maxv, &maxa);
565 FAIL() << " Relative difference (" << maxv << " of " << maxa << " allowed) between argument \"" << node.name()
566 << "\" and expected value is greater than " << eps << " in " << violations << " points";
// Main user-facing entry point: records the named array when writing sanity
// data, verifies it when regression data exists, or fails when verification
// is required but no data is present.
574 Regression& Regression::operator() (const std::string& name, cv::InputArray array, double eps, ERROR_TYPE err)
576 // exit if current test is already failed
577 if(::testing::UnitTest::GetInstance()->current_test_info()->result()->Failed()) return *this;
579 if(!array.empty() && array.depth() == CV_USRTYPE1)
581 ADD_FAILURE() << " Can not check regression for CV_USRTYPE1 data type for " << name;
585 std::string nodename = getCurrentTestNodeName();
// GPU suite stores CPU and GPU runs under distinct prefixed nodes.
588 static const std::string prefix = (param_run_cpu)? "CPU_" : "GPU_";
589 if(suiteName == "gpu")
590 nodename = prefix + nodename;
593 cv::FileNode n = rootIn[nodename];
596 if(param_write_sanity)
// Start a new test node when the test changes, closing the previous one.
598 if (nodename != currentTestNodeName)
600 if (!currentTestNodeName.empty())
602 currentTestNodeName = nodename;
604 write() << nodename << "{";
606 // TODO: verify that name is alphanumeric, current error message is useless
607 write() << name << "{";
611 else if(param_verify_sanity)
613 ADD_FAILURE() << " No regression data for " << name << " argument";
// Verification path: regression node exists for this test.
618 cv::FileNode this_arg = n[name];
619 if (!this_arg.isMap())
620 ADD_FAILURE() << " No regression data for " << name << " argument";
622 verify(this_arg, array, eps, err);
629 /*****************************************************************************************\
630 * ::perf::performance_metrics
631 \*****************************************************************************************/
// Default-initializes all metrics (most member initializations are elided in
// this view); termination reason starts as "unknown".
632 performance_metrics::performance_metrics()
645 terminationReason = TERM_UNKNOWN;
649 /*****************************************************************************************\
651 \*****************************************************************************************/
// Parses the command line, configures the framework's global parameters,
// registers the PerfEnvironment, and (for the GPU suite) selects and
// validates the CUDA device. Must be called before RUN_ALL_TESTS().
654 void TestBase::Init(int argc, const char* const argv[])
656 cv::CommandLineParser args(argc, argv, command_line_keys.c_str());
657 if (args.get<bool>("help"))
664 ::testing::AddGlobalTestEnvironment(new PerfEnvironment);
// Clamp/normalize user-supplied values to sane ranges.
666 param_max_outliers = std::min(100., std::max(0., args.get<double>("perf_max_outliers")));
667 param_min_samples = std::max(1u, args.get<unsigned int>("perf_min_samples"));
668 param_max_deviation = std::max(0., args.get<double>("perf_max_deviation"));
669 param_seed = args.get<uint64>("perf_seed");
670 param_time_limit = std::max(0., args.get<double>("perf_time_limit"));
671 param_force_samples = args.get<unsigned int>("perf_force_samples");
672 param_write_sanity = args.get<bool>("perf_write_sanity");
673 param_verify_sanity = args.get<bool>("perf_verify_sanity");
674 param_threads = args.get<int>("perf_threads");
// Android-only options — presumably inside an elided #ifdef ANDROID block.
676 param_affinity_mask = args.get<int>("perf_affinity_mask");
677 log_power_checkpoints = args.get<bool>("perf_log_power_checkpoints");
// CUDA options — presumably inside an elided #ifdef HAVE_CUDA block.
682 bool printOnly = args.get<bool>("perf_cuda_info_only");
687 param_run_cpu = args.get<bool>("perf_run_cpu");
688 param_cuda_device = std::max(0, std::min(cv::gpu::getCudaEnabledDeviceCount(), args.get<int>("perf_cuda_device")));
691 printf("[----------]\n[ GPU INFO ] \tRun test suite on CPU.\n[----------]\n"), fflush(stdout);
694 cv::gpu::DeviceInfo info(param_cuda_device);
695 if (!info.isCompatible())
697 printf("[----------]\n[ FAILURE ] \tDevice %s is NOT compatible with current GPU module build.\n[----------]\n", info.name().c_str()), fflush(stdout);
701 cv::gpu::setDevice(param_cuda_device);
703 printf("[----------]\n[ GPU INFO ] \tRun test suite on %s GPU.\n[----------]\n", info.name().c_str()), fflush(stdout);
707 // if (!args.check())
709 // args.printErrors();
// Derive runtime limits: a zero time limit becomes 1 tick (effectively
// "stop after first iteration"); zero forced samples means unlimited.
713 timeLimitDefault = param_time_limit == 0.0 ? 1 : (int64)(param_time_limit * cv::getTickFrequency());
714 iterationsLimitDefault = param_force_samples == 0 ? (unsigned)(-1) : param_force_samples;
715 _timeadjustment = _calibrate();
// Measures the framework's own timer overhead so it can be subtracted from
// every sample (see stopTimer()). Runs a warm-up pass with real work, then
// 1000 empty timed iterations; the minimum observed time is the overhead.
718 int64 TestBase::_calibrate()
720 class _helper : public ::perf::TestBase
723 performance_metrics& getMetrics() { return calcMetrics(); }
724 virtual void TestBody() {}
725 virtual void PerfTestBody()
727 //the whole system warmup
729 cv::Mat a(2048, 2048, CV_32S, cv::Scalar(1));
730 cv::Mat b(2048, 2048, CV_32S, cv::Scalar(2));
733 for(declare.iterations(20); startTimer(), next(); stopTimer())
// Calibration proper: time empty loop bodies.
739 for(declare.iterations(1000); startTimer(), next(); stopTimer()){}
746 double compensation = h.getMetrics().min;
747 LOGD("Time compensation is %.0f", compensation);
748 return (int64)compensation;
// MSVC-only: C4355 is benign here because `declare` merely stores `this`.
752 # pragma warning(push)
753 # pragma warning(disable:4355) // 'this' : used in base member initializer list
755 TestBase::TestBase(): declare(this)
759 # pragma warning(pop)
// Records the size of an input/output array for throughput reporting and
// warms it up; uninitialized arguments are a test-authoring error.
763 void TestBase::declareArray(SizeVector& sizes, cv::InputOutputArray a, int wtype)
767 sizes.push_back(std::pair<int, cv::Size>(getSizeInBytes(a), getSize(a)));
770 else if (a.kind() != cv::_InputArray::NONE)
771 ADD_FAILURE() << " Uninitialized input/output parameters are not allowed for performance tests";
// Touches the data (read/write/fill per wtype) so page faults and cache
// misses do not pollute the first timed iteration.
774 void TestBase::warmup(cv::InputOutputArray a, int wtype)
776 if (a.empty()) return;
777 if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
778 warmup_impl(a.getMat(), wtype);
// Vector-of-Mats: warm each element separately.
781 size_t total = a.total();
782 for (size_t i = 0; i < total; ++i)
783 warmup_impl(a.getMat((int)i), wtype);
// Total payload size in bytes (sums elements for vectors of Mats).
787 int TestBase::getSizeInBytes(cv::InputArray a)
789 if (a.empty()) return 0;
790 int total = (int)a.total();
791 if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
792 return total * CV_ELEM_SIZE(a.type());
795 for (int i = 0; i < total; ++i)
796 size += (int)a.total(i) * CV_ELEM_SIZE(a.type(i));
// Reported size of the argument (vector kinds handled on elided lines).
801 cv::Size TestBase::getSize(cv::InputArray a)
803 if (a.kind() != cv::_InputArray::STD_VECTOR_MAT && a.kind() != cv::_InputArray::STD_VECTOR_VECTOR)
// Advances the measurement loop; returns false once the iteration or time
// limit is reached. Reseeds the RNG so every iteration sees the same data.
808 bool TestBase::next()
810 bool has_next = ++currentIter < nIters && totalTime < timeLimit;
811 cv::theRNG().state = param_seed; //this rng should generate same numbers for each run
// Android power measurement: timestamp test start/end in the JUnit XML.
814 if (log_power_checkpoints)
817 gettimeofday(&tim, NULL);
818 unsigned long long t1 = tim.tv_sec * 1000LLU + (unsigned long long)(tim.tv_usec / 1000.f);
820 if (currentIter == 1) RecordProperty("test_start", cv::format("%llu",t1).c_str());
821 if (!has_next) RecordProperty("test_complete", cv::format("%llu",t1).c_str());
// Performs the actual warm-up access chosen by wtype (read via sum, or
// write via setTo; remaining branches elided).
827 void TestBase::warmup_impl(cv::Mat m, int wtype)
832 cv::sum(m.reshape(1));
835 m.reshape(1).setTo(cv::Scalar::all(0));
// Sum of all declared input sizes in bytes (accumulation line elided).
845 unsigned int TestBase::getTotalInputSize() const
847 unsigned int res = 0;
848 for (SizeVector::const_iterator i = inputData.begin(); i != inputData.end(); ++i)
// Sum of all declared output sizes in bytes.
853 unsigned int TestBase::getTotalOutputSize() const
855 unsigned int res = 0;
856 for (SizeVector::const_iterator i = outputData.begin(); i != outputData.end(); ++i)
// Marks the start of one timed sample.
861 void TestBase::startTimer()
863 lastTime = cv::getTickCount();
// Closes a sample: computes the elapsed ticks, subtracts the calibrated
// timer overhead (clamped at zero), and stores the sample.
866 void TestBase::stopTimer()
868 int64 time = cv::getTickCount();
870 ADD_FAILURE() << " stopTimer() is called before startTimer()";
871 lastTime = time - lastTime;
872 totalTime += lastTime;
873 lastTime -= _timeadjustment;
874 if (lastTime < 0) lastTime = 0;
875 times.push_back(lastTime);
// Computes (and caches) the statistics over the collected samples: outlier
// filtering under a log-normal model, then min/mean/stddev/gmean/median.
// Returns the cached metrics when no new samples arrived since last call.
879 performance_metrics& TestBase::calcMetrics()
881 if ((metrics.samples == (unsigned int)currentIter) || times.size() == 0)
884 metrics.bytesIn = getTotalInputSize();
885 metrics.bytesOut = getTotalOutputSize();
886 metrics.frequency = cv::getTickFrequency();
887 metrics.samples = (unsigned int)times.size();
888 metrics.outliers = 0;
// Preserve INTERRUPT/EXCEPTION reasons set elsewhere; otherwise infer
// whether iterations or the time budget ended the loop.
890 if (metrics.terminationReason != performance_metrics::TERM_INTERRUPT && metrics.terminationReason != performance_metrics::TERM_EXCEPTION)
892 if (currentIter == nIters)
893 metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
894 else if (totalTime >= timeLimit)
895 metrics.terminationReason = performance_metrics::TERM_TIME;
897 metrics.terminationReason = performance_metrics::TERM_UNKNOWN;
900 std::sort(times.begin(), times.end());
902 //estimate mean and stddev for log(time)
906 for(TimeVector::const_iterator i = times.begin(); i != times.end(); ++i)
908 double x = static_cast<double>(*i)/runsPerIteration;
909 if (x < DBL_EPSILON) continue;
// Welford-style running update of mean/variance of log(x)
// (the lx/mean-update lines are elided in this view).
913 double delta = lx - gmean;
915 gstddev += delta * (lx - gmean);
918 gstddev = n > 1 ? sqrt(gstddev / (n - 1)) : 0;
920 TimeVector::const_iterator start = times.begin();
921 TimeVector::const_iterator end = times.end();
923 //filter outliers assuming log-normal distribution
924 //http://stackoverflow.com/questions/1867426/modeling-distribution-of-performance-measurements
926 if (gstddev > DBL_EPSILON)
// Keep samples within 3 sigma of the geometric mean; times is sorted, so
// trimming from both ends is sufficient.
928 double minout = exp(gmean - 3 * gstddev) * runsPerIteration;
929 double maxout = exp(gmean + 3 * gstddev) * runsPerIteration;
930 while(*start < minout) ++start, ++metrics.outliers, ++offset;
931 do --end, ++metrics.outliers; while(*end > maxout);
932 ++end, --metrics.outliers;
935 metrics.min = static_cast<double>(*start)/runsPerIteration;
// Second pass over the trimmed range: arithmetic and geometric statistics.
943 for(; start != end; ++start)
945 double x = static_cast<double>(*start)/runsPerIteration;
950 double gdelta = lx - gmean;
952 gstddev += gdelta * (lx - gmean);
955 double delta = x - mean;
957 stddev += delta * (x - mean);
961 metrics.gmean = exp(gmean);
962 metrics.gstddev = m > 1 ? sqrt(gstddev / (m - 1)) : 0;
963 metrics.stddev = n > 1 ? sqrt(stddev / (n - 1)) : 0;
// Median over the trimmed (outlier-free) range; offset skips low outliers.
964 metrics.median = n % 2
965 ? (double)times[offset + n / 2]
966 : 0.5 * (times[offset + n / 2] + times[offset + n / 2 - 1]);
968 metrics.median /= runsPerIteration;
// Post-run sanity checks on the measurement quality: at least one sample is
// mandatory; too few samples, excessive dispersion, or too many outliers are
// reported as (non-fatal) test failures.
973 void TestBase::validateMetrics()
975 performance_metrics& m = calcMetrics();
977 if (HasFailure()) return;
979 ASSERT_GE(m.samples, 1u)
980 << " No time measurements was performed.\nstartTimer() and stopTimer() commands are required for performance tests.";
982 EXPECT_GE(m.samples, param_min_samples)
983 << " Only a few samples are collected.\nPlease increase number of iterations or/and time limit to get reliable performance measurements.";
985 if (m.gstddev > DBL_EPSILON)
// 2*sinh(gstddev*k) approximates the relative width of the (mean-sigma,
// mean+sigma) interval of the fitted log-normal distribution.
987 EXPECT_GT(/*m.gmean * */1., /*m.gmean * */ 2 * sinh(m.gstddev * param_max_deviation))
988 << " Test results are not reliable ((mean-sigma,mean+sigma) deviation interval is greater than measured time interval).";
991 EXPECT_LE(m.outliers, std::max((unsigned int)cvCeil(m.samples * param_max_outliers / 100.), 1u))
992 << " Test results are not reliable (too many outliers).";
// Emits the computed metrics either into the JUnit XML report
// (toJUnitXML == true) or to the debug log for a failed test.
995 void TestBase::reportMetrics(bool toJUnitXML)
997 performance_metrics& m = calcMetrics();
1001 RecordProperty("bytesIn", (int)m.bytesIn);
1002 RecordProperty("bytesOut", (int)m.bytesOut);
1003 RecordProperty("term", m.terminationReason);
1004 RecordProperty("samples", (int)m.samples);
1005 RecordProperty("outliers", (int)m.outliers);
// Floating-point values are serialized as formatted strings.
1006 RecordProperty("frequency", cv::format("%.0f", m.frequency).c_str());
1007 RecordProperty("min", cv::format("%.0f", m.min).c_str());
1008 RecordProperty("median", cv::format("%.0f", m.median).c_str());
1009 RecordProperty("gmean", cv::format("%.0f", m.gmean).c_str());
1010 RecordProperty("gstddev", cv::format("%.6f", m.gstddev).c_str());
1011 RecordProperty("mean", cv::format("%.0f", m.mean).c_str());
1012 RecordProperty("stddev", cv::format("%.0f", m.stddev).c_str());
// Log branch: dump the same data in readable form.
1016 const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
1017 const char* type_param = test_info->type_param();
1018 const char* value_param = test_info->value_param();
1020 #if defined(ANDROID) && defined(USE_ANDROID_LOGGING)
1021 LOGD("[ FAILED ] %s.%s", test_info->test_case_name(), test_info->name());
1024 if (type_param) LOGD("type = %11s", type_param);
1025 if (value_param) LOGD("params = %11s", value_param);
1027 switch (m.terminationReason)
1029 case performance_metrics::TERM_ITERATIONS:
1030 LOGD("termination reason: reached maximum number of iterations");
1032 case performance_metrics::TERM_TIME:
1033 LOGD("termination reason: reached time limit");
1035 case performance_metrics::TERM_INTERRUPT:
1036 LOGD("termination reason: aborted by the performance testing framework");
1038 case performance_metrics::TERM_EXCEPTION:
1039 LOGD("termination reason: unhandled exception");
1041 case performance_metrics::TERM_UNKNOWN:
1043 LOGD("termination reason: unknown");
1047 LOGD("bytesIn =%11lu", (unsigned long)m.bytesIn);
1048 LOGD("bytesOut =%11lu", (unsigned long)m.bytesOut);
1049 if (nIters == (unsigned int)-1 || m.terminationReason == performance_metrics::TERM_ITERATIONS)
1050 LOGD("samples =%11u", m.samples);
1052 LOGD("samples =%11u of %u", m.samples, nIters);
1053 LOGD("outliers =%11u", m.outliers);
1054 LOGD("frequency =%11.0f", m.frequency);
// Times are reported both in raw ticks and in milliseconds.
1057 LOGD("min =%11.0f = %.2fms", m.min, m.min * 1e3 / m.frequency);
1058 LOGD("median =%11.0f = %.2fms", m.median, m.median * 1e3 / m.frequency);
1059 LOGD("gmean =%11.0f = %.2fms", m.gmean, m.gmean * 1e3 / m.frequency);
1060 LOGD("gstddev =%11.8f = %.2fms for 97%% dispersion interval", m.gstddev, m.gmean * 2 * sinh(m.gstddev * 3) * 1e3 / m.frequency);
1061 LOGD("mean =%11.0f = %.2fms", m.mean, m.mean * 1e3 / m.frequency);
1062 LOGD("stddev =%11.0f = %.2fms", m.stddev, m.stddev * 1e3 / m.frequency);
// Per-test setup: fixes the RNG seed for reproducibility, applies thread
// and affinity settings, and resets per-test measurement state.
1067 void TestBase::SetUp()
1069 cv::theRNG().state = param_seed; // this rng should generate same numbers for each run
1071 if (param_threads >= 0)
1072 cv::setNumThreads(param_threads);
// Android-only — presumably inside an elided #ifdef ANDROID block.
1075 if (param_affinity_mask)
1076 setCurrentThreadAffinityMask(param_affinity_mask);
1082 runsPerIteration = 1;
1083 nIters = iterationsLimitDefault;
// -1 so the pre-increment in next() makes the first iteration index 0.
1084 currentIter = (unsigned int)-1;
1085 timeLimit = timeLimitDefault;
// Per-test teardown: enforces that at least one sanity check ran, then
// reports metrics (to the log for diagnostics and to the JUnit XML).
1089 void TestBase::TearDown()
1091 if (!HasFailure() && !verified)
1092 ADD_FAILURE() << "The test has no sanity checks. There should be at least one check at the end of performance test.";
1096 reportMetrics(false);
1099 const ::testing::TestInfo* const test_info = ::testing::UnitTest::GetInstance()->current_test_info();
1100 const char* type_param = test_info->type_param();
1101 const char* value_param = test_info->value_param();
1102 if (value_param) printf("[ VALUE ] \t%s\n", value_param), fflush(stdout);
1103 if (type_param) printf("[ TYPE ] \t%s\n", type_param), fflush(stdout);
1104 reportMetrics(true);
// Resolves a test-resource path relative to $OPENCV_TEST_DATA_PATH (or the
// current directory) and verifies the file exists; throws
// PerfEarlyExitException (after recording a failure) when it does not.
1108 std::string TestBase::getDataPath(const std::string& relativePath)
1110 if (relativePath.empty())
1112 ADD_FAILURE() << " Bad path to test resource";
1113 throw PerfEarlyExitException();
1116 const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
1117 const char *path_separator = "/";
// Same normalization as Regression::init(): "." fallback plus a trailing
// separator only when missing.
1122 int len = (int)strlen(data_path_dir) - 1;
1123 if (len < 0) len = 0;
1124 path = (data_path_dir[0] == 0 ? std::string(".") : std::string(data_path_dir))
1125 + (data_path_dir[len] == '/' || data_path_dir[len] == '\\' ? "" : path_separator);
1130 path += path_separator;
// Avoid double separators when the relative path is absolute-styled.
1133 if (relativePath[0] == '/' || relativePath[0] == '\\')
1134 path += relativePath.substr(1);
1136 path += relativePath;
// Existence check via fopen (the fclose on success is elided in this view).
1138 FILE* fp = fopen(path.c_str(), "r");
1143 ADD_FAILURE() << " Requested file \"" << path << "\" does not exist.";
1144 throw PerfEarlyExitException();
1149 void TestBase::RunPerfTestBody()
1153 this->PerfTestBody();
1155 catch(PerfEarlyExitException)
1157 metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
1158 return;//no additional failure logging
1160 catch(cv::Exception e)
1162 metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
1163 FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws cv::Exception:\n " << e.what();
1165 catch(std::exception e)
1167 metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
1168 FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws std::exception:\n " << e.what();
1172 metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
1173 FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws...";
1177 /*****************************************************************************************\
1178 * ::perf::TestBase::_declareHelper
1179 \*****************************************************************************************/
1180 TestBase::_declareHelper& TestBase::_declareHelper::iterations(unsigned int n)
1182 test->times.clear();
1183 test->times.reserve(n);
1184 test->nIters = std::min(n, TestBase::iterationsLimitDefault);
1185 test->currentIter = (unsigned int)-1;
1189 TestBase::_declareHelper& TestBase::_declareHelper::time(double timeLimitSecs)
1191 test->times.clear();
1192 test->currentIter = (unsigned int)-1;
1193 test->timeLimit = (int64)(timeLimitSecs * cv::getTickFrequency());
1197 TestBase::_declareHelper& TestBase::_declareHelper::tbb_threads(int n)
1199 cv::setNumThreads(n);
1203 TestBase::_declareHelper& TestBase::_declareHelper::runs(unsigned int runsNumber)
1205 test->runsPerIteration = runsNumber;
1209 TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, int wtype)
1211 if (!test->times.empty()) return *this;
1212 TestBase::declareArray(test->inputData, a1, wtype);
1216 TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype)
1218 if (!test->times.empty()) return *this;
1219 TestBase::declareArray(test->inputData, a1, wtype);
1220 TestBase::declareArray(test->inputData, a2, wtype);
1224 TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype)
1226 if (!test->times.empty()) return *this;
1227 TestBase::declareArray(test->inputData, a1, wtype);
1228 TestBase::declareArray(test->inputData, a2, wtype);
1229 TestBase::declareArray(test->inputData, a3, wtype);
1233 TestBase::_declareHelper& TestBase::_declareHelper::in(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype)
1235 if (!test->times.empty()) return *this;
1236 TestBase::declareArray(test->inputData, a1, wtype);
1237 TestBase::declareArray(test->inputData, a2, wtype);
1238 TestBase::declareArray(test->inputData, a3, wtype);
1239 TestBase::declareArray(test->inputData, a4, wtype);
1243 TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, int wtype)
1245 if (!test->times.empty()) return *this;
1246 TestBase::declareArray(test->outputData, a1, wtype);
1250 TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, int wtype)
1252 if (!test->times.empty()) return *this;
1253 TestBase::declareArray(test->outputData, a1, wtype);
1254 TestBase::declareArray(test->outputData, a2, wtype);
1258 TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, int wtype)
1260 if (!test->times.empty()) return *this;
1261 TestBase::declareArray(test->outputData, a1, wtype);
1262 TestBase::declareArray(test->outputData, a2, wtype);
1263 TestBase::declareArray(test->outputData, a3, wtype);
1267 TestBase::_declareHelper& TestBase::_declareHelper::out(cv::InputOutputArray a1, cv::InputOutputArray a2, cv::InputOutputArray a3, cv::InputOutputArray a4, int wtype)
1269 if (!test->times.empty()) return *this;
1270 TestBase::declareArray(test->outputData, a1, wtype);
1271 TestBase::declareArray(test->outputData, a2, wtype);
1272 TestBase::declareArray(test->outputData, a3, wtype);
1273 TestBase::declareArray(test->outputData, a4, wtype);
1277 TestBase::_declareHelper::_declareHelper(TestBase* t) : test(t)
1281 /*****************************************************************************************\
1283 \*****************************************************************************************/
// Index-based comparison functor: orders integer indices by comparing the
// keypoints they refer to (via comparators::KeypointGreater), so that a
// permutation can be sorted without moving the keypoints themselves.
1286 struct KeypointComparator
// Reference to the keypoint vector being ordered (not owned).
1288 std::vector<cv::KeyPoint>& pts_;
1289 comparators::KeypointGreater cmp;
1291 KeypointComparator(std::vector<cv::KeyPoint>& pts) : pts_(pts), cmp() {}
// True when the keypoint at idx1 should precede the one at idx2.
1293 bool operator()(int idx1, int idx2) const
1295 return cmp(pts_[idx1], pts_[idx2]);
// Declared but never defined: the reference member makes a compiler-
// generated assignment ill-formed; this declaration silences the warning.
1298 const KeypointComparator& operator=(const KeypointComparator&); // quiet MSVC
// Sorts keypoints (ordering defined by comparators::KeypointGreater) and
// permutes the descriptor rows in lock-step, so that descriptor row i
// still corresponds to pts[i] after sorting.
1302 void perf::sort(std::vector<cv::KeyPoint>& pts, cv::InputOutputArray descriptors)
1304 cv::Mat desc = descriptors.getMat();
// Hard precondition: exactly one descriptor row per keypoint.
1306 CV_Assert(pts.size() == (size_t)desc.rows);
1307 cv::AutoBuffer<int> idxs(desc.rows);
// Sort an index permutation instead of the data itself so the same
// permutation can be applied to both keypoints and descriptor rows.
// NOTE(review): the loop body is elided - presumably idxs[i] = i; confirm.
1309 for (int i = 0; i < desc.rows; ++i)
1312 std::sort((int*)idxs, (int*)idxs + desc.rows, KeypointComparator(pts))
1314 std::vector<cv::KeyPoint> spts(pts.size());
1315 cv::Mat sdesc(desc.size(), desc.type());
// Gather keypoints and descriptor rows into fresh storage in sorted order;
// the elided tail presumably copies/swaps the results back into pts/desc.
1317 for(int j = 0; j < desc.rows; ++j)
1319 spts[j] = pts[idxs[j]];
1320 cv::Mat row = sdesc.row(j);
1321 desc.row(idxs[j]).copyTo(row);
1328 /*****************************************************************************************\
1330 \*****************************************************************************************/
1332 bool perf::GpuPerf::targetDevice()
1334 return !param_run_cpu;
1338 /*****************************************************************************************\
1340 \*****************************************************************************************/
1344 void PrintTo(const MatType& t, ::std::ostream* os)
1346 switch( CV_MAT_DEPTH((int)t) )
1348 case CV_8U: *os << "8U"; break;
1349 case CV_8S: *os << "8S"; break;
1350 case CV_16U: *os << "16U"; break;
1351 case CV_16S: *os << "16S"; break;
1352 case CV_32S: *os << "32S"; break;
1353 case CV_32F: *os << "32F"; break;
1354 case CV_64F: *os << "64F"; break;
1355 case CV_USRTYPE1: *os << "USRTYPE1"; break;
1356 default: *os << "INVALID_TYPE"; break;
1358 *os << 'C' << CV_MAT_CN((int)t);
1363 /*****************************************************************************************\
1365 \*****************************************************************************************/
1368 void PrintTo(const Size& sz, ::std::ostream* os)
1370 *os << /*"Size:" << */sz.width << "x" << sz.height;