*****************************************************************************************/
enum PERF_STRATEGY
{
+ PERF_STRATEGY_DEFAULT = -1,  // no explicit choice; resolved from the per-test, module, or command-line setting
PERF_STRATEGY_BASE = 0,
PERF_STRATEGY_SIMPLE = 1,
};
static std::string getDataPath(const std::string& relativePath);
static std::string getSelectedImpl();
- static enum PERF_STRATEGY getPerformanceStrategy();
- static enum PERF_STRATEGY setPerformanceStrategy(enum PERF_STRATEGY strategy);
+ static enum PERF_STRATEGY getCurrentModulePerformanceStrategy();
+ static enum PERF_STRATEGY setModulePerformanceStrategy(enum PERF_STRATEGY strategy);
class PerfSkipTestException: public cv::Exception {};
void stopTimer();
bool next();
- //_declareHelper declare;
+ PERF_STRATEGY getCurrentPerformanceStrategy() const;
enum WarmUpType
{
static void warmup(cv::InputOutputArray a, WarmUpType wtype = WARMUP_READ);
performance_metrics& calcMetrics();
+
void RunPerfTestBody();
private:
typedef std::vector<std::pair<int, cv::Size> > SizeVector;
unsigned int getTotalInputSize() const;
unsigned int getTotalOutputSize() const;
+ enum PERF_STRATEGY testStrategy;
+
TimeVector times;
int64 lastTime;
int64 totalTime;
_declareHelper& time(double timeLimitSecs);
_declareHelper& tbb_threads(int n = -1);
_declareHelper& runs(unsigned int runsNumber);
+
+ _declareHelper& strategy(enum PERF_STRATEGY s);
private:
TestBase* test;
_declareHelper(TestBase* t);
static std::string param_impl;
-static enum PERF_STRATEGY param_strategy = PERF_STRATEGY_BASE;
+static enum PERF_STRATEGY strategyForce = PERF_STRATEGY_DEFAULT;
+static enum PERF_STRATEGY strategyModule = PERF_STRATEGY_BASE;
static double param_max_outliers;
static double param_max_deviation;
}
else if (perf_strategy == "base")
{
- param_strategy = PERF_STRATEGY_BASE;
+ strategyForce = PERF_STRATEGY_BASE;
}
else if (perf_strategy == "simple")
{
- param_strategy = PERF_STRATEGY_SIMPLE;
+ strategyForce = PERF_STRATEGY_SIMPLE;
}
else
{
return param_impl;
}
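For reference, the perf_strategy string parsed above comes from a command-line option of the perf test runner; assuming the option is exposed under the same name as the variable, passing --perf_strategy=simple (or =base) to a perf binary forces the corresponding strategy for the whole run via strategyForce.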
-enum PERF_STRATEGY TestBase::getPerformanceStrategy()
+enum PERF_STRATEGY TestBase::setModulePerformanceStrategy(enum PERF_STRATEGY strategy)
{
- return param_strategy;
+ enum PERF_STRATEGY ret = strategyModule;
+ strategyModule = strategy;
+ return ret;
}
-enum PERF_STRATEGY TestBase::setPerformanceStrategy(enum PERF_STRATEGY strategy)
+enum PERF_STRATEGY TestBase::getCurrentModulePerformanceStrategy()
{
- enum PERF_STRATEGY ret = param_strategy;
- param_strategy = strategy;
- return ret;
+ return strategyForce == PERF_STRATEGY_DEFAULT ? strategyModule : strategyForce;
}
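With the setter above, a module can install its own default before any test runs. The sketch below is illustrative only: real OpenCV perf modules normally go through the CV_PERF_TEST_MAIN machinery, and this hand-written main() merely shows where such a call could sit.

#include "opencv2/ts/ts_perf.hpp"

int main(int argc, char** argv)
{
    // Illustrative only: make the simple strategy this module's default.
    // Per-test declare.strategy() calls and a command-line override still win.
    perf::TestBase::setModulePerformanceStrategy(perf::PERF_STRATEGY_SIMPLE);

    ::testing::InitGoogleTest(&argc, argv);
    return RUN_ALL_TESTS();
}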
_helper h;
h.PerfTestBody();
double compensation = h.getMetrics().min;
- if (param_strategy == PERF_STRATEGY_SIMPLE)
+ if (getCurrentModulePerformanceStrategy() == PERF_STRATEGY_SIMPLE)
{
CV_Assert(compensation < 0.01 * cv::getTickFrequency());
compensation = 0.0f; // simple strategy doesn't require any compensation
# pragma warning(push)
# pragma warning(disable:4355) // 'this' : used in base member initializer list
#endif
-TestBase::TestBase(): declare(this)
+TestBase::TestBase(): testStrategy(PERF_STRATEGY_DEFAULT), declare(this)
{
}
#ifdef _MSC_VER
return cv::Size();
}
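The function below resolves the effective strategy with decreasing precedence: the command-line override (strategyForce), then a per-test declaration (testStrategy), then the module-wide default (strategyModule).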
+PERF_STRATEGY TestBase::getCurrentPerformanceStrategy() const
+{
+ if (strategyForce == PERF_STRATEGY_DEFAULT)
+ return (testStrategy == PERF_STRATEGY_DEFAULT) ? strategyModule : testStrategy;
+ else
+ return strategyForce;
+}
+
bool TestBase::next()
{
static int64 lastActivityPrintTime = 0;
break;
}
- if (param_strategy == PERF_STRATEGY_BASE)
+ if (getCurrentPerformanceStrategy() == PERF_STRATEGY_BASE)
{
has_next = currentIter < nIters && totalTime < timeLimit;
}
else
{
- assert(param_strategy == PERF_STRATEGY_SIMPLE);
+ assert(getCurrentPerformanceStrategy() == PERF_STRATEGY_SIMPLE);
if (totalTime - lastActivityPrintTime >= cv::getTickFrequency() * 10)
{
std::cout << '.' << std::endl;
TimeVector::const_iterator start = times.begin();
TimeVector::const_iterator end = times.end();
- if (param_strategy == PERF_STRATEGY_BASE)
+ if (getCurrentPerformanceStrategy() == PERF_STRATEGY_BASE)
{
//estimate mean and stddev for log(time)
double gmean = 0;
++end, --metrics.outliers;
}
}
- else if (param_strategy == PERF_STRATEGY_SIMPLE)
+ else if (getCurrentPerformanceStrategy() == PERF_STRATEGY_SIMPLE)
{
metrics.outliers = static_cast<int>(times.size() * param_max_outliers / 100);
for (unsigned int i = 0; i < metrics.outliers; i++)
ASSERT_GE(m.samples, 1u)
<< " No time measurements was performed.\nstartTimer() and stopTimer() commands are required for performance tests.";
- if (param_strategy == PERF_STRATEGY_BASE)
+ if (getCurrentPerformanceStrategy() == PERF_STRATEGY_BASE)
{
EXPECT_GE(m.samples, param_min_samples)
<< " Only a few samples are collected.\nPlease increase number of iterations or/and time limit to get reliable performance measurements.";
EXPECT_LE(m.outliers, std::max((unsigned int)cvCeil(m.samples * param_max_outliers / 100.), 1u))
<< " Test results are not reliable (too many outliers).";
}
- else if (param_strategy == PERF_STRATEGY_SIMPLE)
+ else if (getCurrentPerformanceStrategy() == PERF_STRATEGY_SIMPLE)
{
double mean = metrics.mean * 1000.0f / metrics.frequency;
double stddev = metrics.stddev * 1000.0f / metrics.frequency;
return *this;
}
+TestBase::_declareHelper& TestBase::_declareHelper::strategy(enum PERF_STRATEGY s)
+{
+ test->testStrategy = s;
+ return *this;
+}
+
TestBase::_declareHelper::_declareHelper(TestBase* t) : test(t)
{
}
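To close the loop, here is a minimal sketch of how an individual test could opt into the simple strategy through the new helper. The test name, matrix sizes, and the cv::blur call are assumptions picked for illustration; only declare.strategy() comes from this patch.

#include "perf_precomp.hpp"   // the module's usual perf precompiled header (an assumption here)

using namespace perf;

PERF_TEST(strategy, blur3x3_simple)
{
    cv::Mat src(720, 1280, CV_8UC1), dst(720, 1280, CV_8UC1);

    // Per-test override: measure this test with the simple strategy,
    // regardless of the module default (a command-line override still wins).
    declare.in(src, WARMUP_RNG).out(dst)
           .strategy(PERF_STRATEGY_SIMPLE);

    TEST_CYCLE() cv::blur(src, dst, cv::Size(3, 3));

    SANITY_CHECK(dst, 1);
}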