2 * Copyright (c) 2011-2015 Samsung Electronics Co., Ltd All Rights Reserved
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
18 * @author Przemyslaw Dobrowolski (p.dobrowolsk@samsung.com)
19 * @author Lukasz Wrzosek (l.wrzosek@samsung.com)
21 * @brief This file is the header file of test runner
24 #ifndef DPL_TEST_RUNNER_H
25 #define DPL_TEST_RUNNER_H
39 #include <dpl/atomic.h>
40 #include <dpl/availability.h>
41 #include <dpl/colors.h>
42 #include <dpl/gdbbacktrace.h>
43 #include <dpl/singleton.h>
44 #include <dpl/test/test_failed.h>
45 #include <dpl/test/test_ignored.h>
46 #include <dpl/test/test_results_collector.h>
// Registered result collectors, keyed by collector name.
52 typedef std::map<std::string, TestResultsCollectorBasePtr>
53 TestResultsCollectors;
54 TestResultsCollectors m_collectors;
// Id of the test case the run should start from -- presumably set from
// command-line arguments; TODO confirm against ExecTestRunner().
56 std::string m_startTestId;
// Failure descriptions accumulated for the current test case via
// addFailReason().
59 std::queue<std::string> m_failReason;
// Constructor initializer-list fragment: start with no current test case
// and with child-process logging disabled.
63 m_currentTestCase(nullptr)
65 , m_allowChildLogs(false)
// Begins timing the current performance test case. NOTE(review): despite
// the parameter name, the argument is a generic chrono duration holding
// the maximum accepted run time (see RUNNER_PERF_TEST_BEGIN below).
68 void beginPerformanceTestTime(std::chrono::system_clock::duration maxTimeInMicroseconds);
// Stops timing the current performance test case (see RUNNER_PERF_TEST_END).
69 void endPerformanceTestTime();
// Reads back the performance data of the current test case through the
// output reference parameters.
70 void getCurrentTestCasePerformanceResult(bool& isPerformanceTest,
71 std::chrono::system_clock::duration& result,
72 std::chrono::system_clock::duration& resultMax);
// Stores the performance data of the current test case.
73 void setCurrentTestCasePerformanceResult(bool isPerformanceTest,
74 std::chrono::system_clock::duration result,
75 std::chrono::system_clock::duration resultMax);
// Appends one failure description for the current test case -- presumably
// queued in m_failReason; called by the RUNNER_ASSERT_* macros below.
77 void addFailReason(const std::string &reason);
// Signature of a single registered test procedure.
79 typedef void (*TestCase)();
// NOTE(review): fragment of struct TestCaseStruct; its declaration and
// its name/proc members lie outside this view.
// Per-test-case performance bookkeeping (see begin/endPerformanceTestTime).
87 bool m_isPerformanceTest;
88 std::chrono::system_clock::time_point m_performanceTestStartTime;
89 std::chrono::system_clock::duration m_performanceTestDurationTime;
90 std::chrono::system_clock::duration m_performanceMaxTime;
// Order test cases by name.
92 bool operator <(const TestCaseStruct &other) const
94 return name < other.name;
// Two test cases are equal when their names match.
97 bool operator ==(const TestCaseStruct &other) const
99 return name == other.name;
// Constructs a named test case; performance measurement is off by default.
102 TestCaseStruct(const std::string &n, TestCase p) :
105 m_isPerformanceTest(false)
109 typedef std::list<TestCaseStruct> TestCaseStructList;
110 typedef std::map<std::string, TestCaseStructList> TestCaseGroupMap;
// All registered test cases, grouped by group name.
111 TestCaseGroupMap m_testGroups;
// Test case currently being executed; nullptr when none.
113 TestCaseStruct * m_currentTestCase;
// Tests and groups explicitly selected for this run -- presumably from
// command-line arguments; TODO confirm how these sets are populated.
115 typedef std::set<std::string> SelectedTestNameSet;
116 SelectedTestNameSet m_selectedTestNamesSet;
117 typedef std::set<std::string> SelectedTestGroupSet;
118 SelectedTestGroupSet m_selectedTestGroupSet;
// Group most recently made current -- presumably via InitGroup(); verify.
119 std::string m_currentGroup;
// Total number of assertions evaluated across the run (see MarkAssertion()).
121 DPL::Atomic m_totalAssertions;
// Terminate without any logs: some tests need to call fork(), and the
// child process must not produce any logs -- it should die quietly.
127 bool m_allowChildLogs;
// Signals invalid command-line arguments (default message shown below).
130 void InvalidArgs(const std::string& message = "Invalid arguments!");
// Restrict the set of groups/cases to run using one or more XML files.
133 bool filterGroupsByXmls(const std::vector<std::string> & files);
134 bool filterByXML(std::map<std::string, bool> & casesMap);
// Normalizes an XML tag string against a test-case name -- exact rules
// are in the implementation file, outside this view.
135 void normalizeXMLTag(std::string& str, const std::string& testcase);
// Outcome of a single test-case run.
137 enum Status { FAILED, IGNORED, PASS };
// Runs one test case and reports its outcome.
139 Status RunTestCase(const TestCaseStruct& testCase);
// Accessors for the test case currently being executed.
141 void setCurrentTestCase(TestCaseStruct* testCase);
142 TestCaseStruct *getCurrentTestCase();
// Builds a single string from the queued fail reasons plus the given
// reason -- presumably; exact format is in the implementation file.
146 std::string getConcatedFailReason(const std::string &reason);
// Records the result of one test case -- presumably with every registered
// collector in m_collectors. Performance data is forwarded when the test
// measured time; defaults mean "passed, nothing to report".
148 void CollectResult(const std::string& id,
149 const TestResultsCollectorBase::FailStatus status
150 = TestResultsCollectorBase::FailStatus::NONE,
151 const std::string& reason = std::string(),
152 const bool& isPerformanceTest = false,
153 const std::chrono::system_clock::duration& performanceTime = std::chrono::microseconds::zero(),
154 const std::chrono::system_clock::duration& performanceMaxTime = std::chrono::microseconds::zero());
// Counts one evaluated assertion (called by the RUNNER_ASSERT_* macros).
157 void MarkAssertion();
// Registration API used by the RUNNER_TEST* macros below.
159 void RegisterTest(const char *testName, TestCase proc);
160 void InitGroup(const char* name);
// Entry points: parse arguments and execute the selected tests.
162 int ExecTestRunner(int argc, char *argv[]);
163 typedef std::vector<std::string> ArgsList;
164 int ExecTestRunner(ArgsList args);
165 bool getRunIgnored() const;
166 // The runner will terminate as soon as possible (after current test).
// Whether child processes may produce logs (see m_allowChildLogs).
168 bool GetAllowChildLogs();
// Global singleton through which tests and macros reach the runner.
171 typedef DPL::Singleton<TestRunner> TestRunnerSingleton;
// Defines a file-local static initializer that registers test group
// GroupName with the TestRunner singleton; the trailing variable forces
// the initializer to run at static-initialization time. NOTE(review):
// parts of the macro body lie outside this view.
175 #define RUNNER_TEST_GROUP_INIT(GroupName) \
176 static int Static##GroupName##Init() \
178 DPL::Test::TestRunnerSingleton::Instance().InitGroup(#GroupName); \
181 const int DPL_UNUSED Static##GroupName##InitVar = \
182 Static##GroupName##Init();
// Registers function Proc as a test case, using its identifier as the
// test name, via the same static-initializer trick as above.
184 #define RUNNER_TEST(Proc) \
186 static int Static##Proc##Init() \
188 DPL::Test::TestRunnerSingleton::Instance().RegisterTest(#Proc, &Proc); \
191 const int DPL_UNUSED Static##Proc##InitVar = Static##Proc##Init(); \
198 * Use them to create assertions in test cases. To do that put them inside test
199 * body. Failing assertion indicates failing test.
// Assertion macro (fragment): counts the assertion via MarkAssertion(),
// and on failure builds a message (including a gdb backtrace) into a
// DPL::Test::TestFailed; when the stack is already unwinding the reason
// is recorded via addFailReason() instead of (presumably) being thrown.
// TODO(review): confirm against the full macro body -- several lines of
// it lie outside this view.
202 #define RUNNER_ASSERT_MSG(test, message) \
205 DPL::Test::TestRunnerSingleton::Instance().MarkAssertion(); \
209 std::ostringstream assertMsg; \
210 assertMsg << message << DPL::gdbbacktrace(); \
211 DPL::Test::TestFailed e(#test, \
215 if (!std::uncaught_exception()) \
217 DPL::Test::TestRunnerSingleton::Instance().addFailReason(e.GetMessage()); \
// Errno-aware variant (fragment): like RUNNER_ASSERT_MSG, but appends
// strerror(errno) (and a gdb backtrace) to the failure message.
// NOTE(review): parts of the macro body lie outside this view.
221 #define RUNNER_ASSERT_ERRNO_MSG(test, message) \
224 DPL::Test::TestRunnerSingleton::Instance().MarkAssertion(); \
228 const char *err = strerror(errno); \
229 std::ostringstream assertMsg; \
230 assertMsg << message; \
231 if (!assertMsg.str().empty()) \
233 assertMsg << err << DPL::gdbbacktrace(); \
234 DPL::Test::TestFailed e(#test, \
238 if (!std::uncaught_exception()) \
240 DPL::Test::TestRunnerSingleton::Instance().addFailReason(e.GetMessage()); \
// Errno-aware assertion with no extra message.
244 #define RUNNER_ASSERT_ERRNO(test) \
245 RUNNER_ASSERT_ERRNO_MSG(test, "")
// Unconditionally fails the test with the given message.
247 #define RUNNER_FAIL_MSG(message) \
248 RUNNER_ASSERT_MSG(false, message)
// Plain assertion with no extra message.
250 #define RUNNER_ASSERT(test) \
251 RUNNER_ASSERT_MSG(test, "")
256 * When a test reaches this macro call, its further code will be ignored.
257 * To ignore a whole test, put this macro call at the beginning of this test's
// Marks the current test as ignored: builds the message and throws
// DPL::Test::TestIgnored -- presumably mapped by the runner to
// Status::IGNORED. Parts of the macro body lie outside this view.
261 #define RUNNER_IGNORED_MSG(message) \
264 std::ostringstream assertMsg; \
265 assertMsg << message; \
266 throw DPL::Test::TestIgnored(assertMsg.str()); \
272 * Use these macros to do the time measurement. The first macro will start time measurement,
273 * the second will gather the result. These macros can be used only once per test-case.
274 * The result of time measurement will be displayed only if the test will pass.
275 * Notice that these macros will work only if used in the parent process. If these
276 * macros are used in a child process then no time measure results will be printed.
277 * This macro in multiprocess tests has effect only if used in parent process. This macro
278 * used in child process in multiprocess test has no effect.
279 * The precision of measurement is 1 microsecond - the smallest time value that can be
280 * measured is 0.000001s.
281 * The time measure results will be printed only in a specific output format:
// RUNNER_PERF_TEST_BEGIN starts time measurement for the current test:
// maxTime is given in seconds (floating point) and converted here to
// std::chrono::microseconds. RUNNER_PERF_TEST_END below stops the
// measurement. Parts of both macro bodies lie outside this view.
287 #define RUNNER_PERF_TEST_BEGIN(maxTime) \
289 DPL::Test::TestRunnerSingleton::Instance().beginPerformanceTestTime( \
290 std::chrono::microseconds{static_cast<long long int>(maxTime*1000000.0)}); \
293 #define RUNNER_PERF_TEST_END() \
295 DPL::Test::TestRunnerSingleton::Instance().endPerformanceTestTime(); \
301 * Use these macros to print error messages during test run time
// Prints an error message to stderr, wrapped in red terminal colors.
304 #define RUNNER_ERROR_MSG(message) \
306 std::cerr << DPL::Colors::Text::RED_BEGIN << message \
307 << DPL::Colors::Text::RED_END << std::endl; \
310 #endif // DPL_TEST_RUNNER_H