Simple DirectMedia Layer
Copyright (C) 1997-2018 Sam Lantinga <slouken@libsdl.org>

This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.

Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:

1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
22 #include "SDL_config.h"
/* --- Shared format strings and configuration for the test harness --- */
31 /* Invalid test name/description message format */
32 #define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"
34 /* Log summary message format */
/* Args: label ("Suite"/"Run"), total, passed, failed, skipped counts. */
35 #define SDLTEST_LOG_SUMMARY_FORMAT "%s Summary: Total=%d Passed=%d Failed=%d Skipped=%d"
37 /* Final result message format */
/* Args: entity label ("Test"/"Suite"/"Run /w seed"), name, verdict string. */
38 #define SDLTEST_FINAL_RESULT_FORMAT ">>> %s '%s': %s\n"
40 /* ! \brief Timeout for single test case execution */
/* Unit is seconds (3600 = 1 hour); SDLTest_SetTestTimeout multiplies by 1000
   before handing it to SDL_AddTimer. */
41 static Uint32 SDLTest_TestCaseTimeout = 3600;
44 * Generates a random run seed string for the harness. The generated seed
45 * will contain alphanumeric characters (0-9A-Z).
47 * Note: The returned string needs to be deallocated by the caller.
49 * \param length The length of the seed string to generate
51 * \returns The generated seed string
54 SDLTest_GenerateRunSeed(const int length)
57 SDLTest_RandomContext randomContext;
60 /* Sanity check input */
/* NOTE(review): the guard's condition and return lines are missing from this
   extract; presumably length <= 0 triggers the error below and returns NULL. */
62 SDLTest_LogError("The length of the harness seed must be >0.");
66 /* Allocate output buffer */
/* +1 leaves room for the NUL terminator. */
67 seed = (char *)SDL_malloc((length + 1) * sizeof(char));
/* NOTE(review): the NULL-check condition is missing from this extract; the
   two lines below presumably sit inside an if (seed == NULL) branch that
   returns NULL after reporting the out-of-memory condition. */
69 SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
70 SDL_Error(SDL_ENOMEM);
74 /* Generate a random string of alphanumeric characters */
/* Time-based init: each harness run gets a fresh, non-reproducible seed
   unless the caller supplied one explicitly. */
75 SDLTest_RandomInitTime(&randomContext);
76 for (counter = 0; counter < length; counter++) {
77 unsigned int number = SDLTest_Random(&randomContext);
/* Map the random value into ASCII 48..90 ('0'..'Z'); (91 - 48) is the
   width of that range. */
78 char ch = (char) (number % (91 - 48)) + 48;
/* ASCII 58..64 are the punctuation characters ':' .. '@' sitting between
   the digits and the uppercase letters; the (missing) body of this branch
   presumably remaps them so only 0-9A-Z are ever emitted. */
79 if (ch >= 58 && ch <= 64) {
90 * Generates an execution key for the fuzzer.
92 * \param runSeed The run seed to use
93 * \param suiteName The name of the test suite
94 * \param testName The name of the test
95 * \param iteration The iteration count
97 * \returns The generated execution key to initialize the fuzzer with.
101 SDLTest_GenerateExecKey(const char *runSeed, char *suiteName, char *testName, int iteration)
103 SDLTest_Md5Context md5Context;
105 char iterationString[16];
106 size_t runSeedLength;
107 size_t suiteNameLength;
108 size_t testNameLength;
109 size_t iterationStringLength;
110 size_t entireStringLength;
/* Input validation: each failing check logs an error and (on lines missing
   from this extract) presumably returns an error sentinel to the caller. */
113 if (runSeed == NULL || runSeed[0] == '\0') {
114 SDLTest_LogError("Invalid runSeed string.");
118 if (suiteName == NULL || suiteName[0] == '\0') {
119 SDLTest_LogError("Invalid suiteName string.");
123 if (testName == NULL || testName[0] == '\0') {
124 SDLTest_LogError("Invalid testName string.");
128 if (iteration <= 0) {
129 SDLTest_LogError("Invalid iteration count.")
133 /* Convert iteration number into a string */
134 SDL_memset(iterationString, 0, sizeof(iterationString));
135 SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);
137 /* Combine the parameters into single string */
138 runSeedLength = SDL_strlen(runSeed);
139 suiteNameLength = SDL_strlen(suiteName);
140 testNameLength = SDL_strlen(testName);
141 iterationStringLength = SDL_strlen(iterationString);
/* +1 accounts for the NUL terminator written by SDL_snprintf below. */
142 entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
143 buffer = (char *)SDL_malloc(entireStringLength);
144 if (buffer == NULL) {
145 SDLTest_LogError("Failed to allocate buffer for execKey generation.");
146 SDL_Error(SDL_ENOMEM);
149 SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);
151 /* Hash string and use half of the digest as 64bit exec key */
/* NOTE(review): entireStringLength includes the terminating NUL, so the
   digest covers strlen(buffer) + 1 bytes; harmless, but changing it would
   change all derived exec keys. The SDL_free(buffer) call appears to be on
   a line missing from this extract — confirm the buffer is released. */
152 SDLTest_Md5Init(&md5Context);
153 SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, (unsigned int) entireStringLength);
154 SDLTest_Md5Final(&md5Context);
/* Reinterpret the first 8 of the 16 digest bytes as the Uint64 exec key. */
156 keys = (Uint64 *)md5Context.digest;
162 * \brief Set timeout handler for test.
164 * Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
166 * \param timeout Timeout interval in seconds.
167 * \param callback Function that will be called after timeout has elapsed.
169 * \return Timer id or -1 on failure.
172 SDLTest_SetTestTimeout(int timeout, void (*callback)())
174 Uint32 timeoutInMilliseconds;
/* A NULL callback cannot be armed; bail out (return on missing lines). */
177 if (callback == NULL) {
178 SDLTest_LogError("Timeout callback can't be NULL");
/* NOTE(review): the timeout <= 0 guard's condition line is missing from
   this extract; the error message below presumably sits inside it. */
183 SDLTest_LogError("Timeout value must be bigger than zero.");
187 /* Init SDL timer if not initialized before */
188 if (SDL_WasInit(SDL_INIT_TIMER) == 0) {
/* SDL_InitSubSystem returns non-zero on failure. */
189 if (SDL_InitSubSystem(SDL_INIT_TIMER)) {
190 SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
/* Convert seconds to the milliseconds SDL_AddTimer expects. */
196 timeoutInMilliseconds = timeout * 1000;
/* NOTE(review): the cast assumes callback's real signature is compatible
   with SDL_TimerCallback (Uint32 (*)(Uint32, void *)); in practice the
   handler never returns (it exits), so the mismatch is benign — confirm. */
197 timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
/* An SDL_TimerID of 0 indicates failure (condition line missing here). */
199 SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
207 * \brief Timeout handler. Aborts test run and exits harness process.
209 static SDL_NORETURN void
/* NOTE(review): the function's name line (the bail-out handler armed by
   SDLTest_SetTestTimeout in SDLTest_RunTest) is missing from this extract. */
212 SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
/* exit(), not return: a hung test cannot be unwound safely, so the whole
   harness process terminates with the TEST_ABORTED status code. */
213 exit(TEST_ABORTED); /* bail out from the test */
217 * \brief Execute a test using the given execution key.
219 * \param testSuite Suite containing the test case.
220 * \param testCase Case to execute.
221 * \param execKey Execution key for the fuzzer.
222 * \param forceTestRun Force test to run even if test was disabled in suite.
224 * \returns Test case result.
227 SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseReference *testCase, Uint64 execKey, SDL_bool forceTestRun)
229 SDL_TimerID timer = 0;
230 int testCaseResult = 0;
/* Guard against NULL references before dereferencing anything. */
234 if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
236 SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
237 return TEST_RESULT_SETUP_FAILURE;
/* Disabled tests are skipped unless the caller forces execution (used when
   a test filter explicitly names a disabled test). */
240 if (!testCase->enabled && forceTestRun == SDL_FALSE)
242 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
243 return TEST_RESULT_SKIPPED;
246 /* Initialize fuzzer */
/* Seeding with execKey makes every fuzzer value in this run reproducible. */
247 SDLTest_FuzzerInit(execKey);
249 /* Reset assert tracker */
250 SDLTest_ResetAssertSummary();
252 /* Set timeout timer */
/* Arms the watchdog; on expiry the handler exits the whole process. */
253 timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
255 /* Maybe run suite initializer function */
256 if (testSuite->testSetUp) {
257 testSuite->testSetUp(0x0);
258 if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
259 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, "Failed");
/* NOTE(review): this early return does not remove the timeout timer armed
   above (SDL_RemoveTimer is only visible on the normal path below) —
   confirm whether the timer leaks on setup failure. */
260 return TEST_RESULT_SETUP_FAILURE;
264 /* Run test case function */
265 testCaseResult = testCase->testCase(0x0);
267 /* Convert test execution result into harness result */
268 if (testCaseResult == TEST_SKIPPED) {
269 /* Test was programatically skipped */
270 testResult = TEST_RESULT_SKIPPED;
271 } else if (testCaseResult == TEST_STARTED) {
272 /* Test did not return a TEST_COMPLETED value; assume it failed */
273 testResult = TEST_RESULT_FAILED;
274 } else if (testCaseResult == TEST_ABORTED) {
275 /* Test was aborted early; assume it failed */
276 testResult = TEST_RESULT_FAILED;
/* Otherwise (TEST_COMPLETED): derive pass/fail from the assert counters. */
278 /* Perform failure analysis based on asserts */
279 testResult = SDLTest_AssertSummaryToTestResult();
282 /* Maybe run suite cleanup function (ignore failed asserts) */
283 if (testSuite->testTearDown) {
284 testSuite->testTearDown(0x0);
287 /* Cancel timeout timer */
/* Presumably guarded by an if (timer) check on a line missing from this
   extract; SDLTest_SetTestTimeout can return an error value. */
289 SDL_RemoveTimer(timer);
292 /* Report on asserts and fuzzer usage */
293 fuzzerCount = SDLTest_GetFuzzerInvocationCount();
294 if (fuzzerCount > 0) {
295 SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
298 /* Final log based on test execution result */
299 if (testCaseResult == TEST_SKIPPED) {
300 /* Test was programatically skipped */
301 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Programmatically)");
302 } else if (testCaseResult == TEST_STARTED) {
303 /* Test did not return a TEST_COMPLETED value; assume it failed */
304 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (test started, but did not return TEST_COMPLETED)");
305 } else if (testCaseResult == TEST_ABORTED) {
306 /* Test was aborted early; assume it failed */
307 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (Aborted)");
/* Completed normally: log the full assert summary instead. */
309 SDLTest_LogAssertSummary();
315 /* Prints summary of all suites/tests contained in the given reference */
317 static void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
321 SDLTest_TestSuiteReference *testSuite;
322 SDLTest_TestCaseReference *testCase;
324 /* Loop over all suites */
/* NOTE(review): &testSuites[suiteCounter] is the address of an array element
   and is therefore never NULL, so this condition is always true — the
   terminator check should inspect the element (or a field of it), as
   SDLTest_RunSuites does with its array-of-pointers parameter. Confirm
   against the full file before relying on this function. */
326 while(&testSuites[suiteCounter]) {
327 testSuite=&testSuites[suiteCounter];
/* Fall back to the "(Invalid)" placeholder when a name is missing. */
329 SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
330 (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
332 /* Loop over all test cases */
/* The testCases array is terminated by a NULL entry. */
334 while(testSuite->testCases[testCounter])
336 testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
338 SDLTest_Log(" Test Case %i - %s: %s", testCounter,
339 (testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT,
340 (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
346 /* Gets a timer value in seconds */
/* NOTE(review): clock() measures consumed processor time, not wall-clock
   time, so the "runtime" figures logged by the harness undercount time a
   test spends sleeping or blocked — confirm this is intentional. */
347 static float GetClock()
349 float currentClock = clock() / (float) CLOCKS_PER_SEC;
354 * \brief Execute a test suite using the given run seed and execution key.
356 * The filter string is matched to the suite name (full comparison) to select a single suite,
357 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
359 * \param testSuites Suites containing the test case.
360 * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
361 * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
362 * \param filter Filter specification. NULL disables. Case sensitive.
363 * \param testIterations Number of iterations to run each test case.
365 * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
367 int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
369 int totalNumberOfTests = 0;
370 int failedNumberOfTests = 0;
373 int iterationCounter;
374 SDLTest_TestSuiteReference *testSuite;
375 const SDLTest_TestCaseReference *testCase;
376 const char *runSeed = NULL;
377 char *currentSuiteName;
378 char *currentTestName;
380 float runStartSeconds;
381 float suiteStartSeconds;
382 float testStartSeconds;
384 float suiteEndSeconds;
385 float testEndSeconds;
/* Filtering state: suiteFilter/testFilter flags (declared on lines missing
   from this extract) select exactly one suite or one test by exact name. */
388 char *suiteFilterName = NULL;
390 char *testFilterName = NULL;
391 SDL_bool forceTestRun = SDL_FALSE;
/* total* counters accumulate across the whole run; the test*Count trio
   is reset per suite. */
394 Uint32 totalTestFailedCount = 0;
395 Uint32 totalTestPassedCount = 0;
396 Uint32 totalTestSkippedCount = 0;
397 Uint32 testFailedCount = 0;
398 Uint32 testPassedCount = 0;
399 Uint32 testSkippedCount = 0;
401 const SDLTest_TestCaseReference **failedTests;
403 /* Sanitize test iterations */
/* Body (missing from this extract) presumably clamps testIterations to 1. */
404 if (testIterations < 1) {
408 /* Generate run seed if we don't have one already */
409 if (userRunSeed == NULL || userRunSeed[0] == '\0') {
/* NOTE(review): a generated seed is heap-allocated (caller must free, per
   SDLTest_GenerateRunSeed's contract); no SDL_free(runSeed) is visible in
   this extract — confirm it is released on the missing lines. */
410 runSeed = SDLTest_GenerateRunSeed(16);
411 if (runSeed == NULL) {
412 SDLTest_LogError("Generating a random seed failed");
/* Otherwise use the caller-supplied seed verbatim (not owned by us). */
416 runSeed = userRunSeed;
420 /* Reset per-run counters */
421 totalTestFailedCount = 0;
422 totalTestPassedCount = 0;
423 totalTestSkippedCount = 0;
425 /* Take time - run start */
426 runStartSeconds = GetClock();
428 /* Log run with fuzzer parameters */
429 SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
431 /* Count the total number of tests */
/* Both arrays are NULL-terminated: testSuites[] by a NULL suite pointer,
   each suite's testCases[] by a NULL case pointer. */
433 while (testSuites[suiteCounter]) {
434 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
437 while (testSuite->testCases[testCounter])
440 totalNumberOfTests++;
444 /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
445 failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
446 if (failedTests == NULL) {
447 SDLTest_LogError("Unable to allocate cache for failed tests");
448 SDL_Error(SDL_ENOMEM);
452 /* Initialize filtering */
453 if (filter != NULL && filter[0] != '\0') {
454 /* Loop over all suites to check if we have a filter match */
456 while (testSuites[suiteCounter] && suiteFilter == 0) {
457 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
/* Exact, case-sensitive comparison against the suite name first. */
459 if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
460 /* Matched a suite name */
462 suiteFilterName = testSuite->name;
463 SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
467 /* Within each suite, loop over all test cases to check if we have a filter match */
469 while (testSuite->testCases[testCounter] && testFilter == 0)
471 testCase = testSuite->testCases[testCounter];
473 if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
474 /* Matched a test name */
/* Remember both names: the suite filter narrows the outer loop, the
   test filter narrows the inner one. */
476 suiteFilterName = testSuite->name;
478 testFilterName = testCase->name;
479 SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
/* A filter that matched nothing is an error: report, clean up, and
   (on missing lines) return exit code 2. */
485 if (suiteFilter == 0 && testFilter == 0) {
486 SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
487 SDLTest_Log("Exit code: 2");
488 SDL_free((void *) failedTests);
493 /* Loop over all suites */
495 while(testSuites[suiteCounter]) {
496 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
497 currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
500 /* Filter suite if flag set and we have a name */
501 if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
502 SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
/* Non-matching suite: log the skip and continue (on missing lines). */
504 SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
509 /* Reset per-suite counters */
512 testSkippedCount = 0;
514 /* Take time - suite start */
515 suiteStartSeconds = GetClock();
517 /* Log suite started */
518 SDLTest_Log("===== Test Suite %i: '%s' started\n",
522 /* Loop over all test cases */
524 while(testSuite->testCases[testCounter])
526 testCase = testSuite->testCases[testCounter];
527 currentTestName = (char *)((testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
530 /* Filter tests if flag set and we have a name */
531 if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
532 SDL_strcmp(testFilterName, testCase->name) != 0) {
534 SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
539 /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
540 if (testFilter == 1 && !testCase->enabled) {
541 SDLTest_Log("Force run of disabled test since test filter was set");
542 forceTestRun = SDL_TRUE;
545 /* Take time - test start */
546 testStartSeconds = GetClock();
548 /* Log test started */
549 SDLTest_Log("----- Test Case %i.%i: '%s' started",
553 if (testCase->description != NULL && testCase->description[0] != '\0') {
554 SDLTest_Log("Test Description: '%s'",
555 (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
558 /* Loop over all iterations */
559 iterationCounter = 0;
/* NOTE(review): the iterationCounter++ presumably happens at the top of
   the loop on a line missing from this extract — SDLTest_GenerateExecKey
   rejects iteration <= 0, so confirm the increment precedes the call. */
560 while(iterationCounter < testIterations)
/* A user-supplied exec key overrides per-iteration key generation. */
564 if (userExecKey != 0) {
565 execKey = userExecKey;
567 execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
570 SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
571 testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);
/* Tally the result; the per-suite counter increments sit on lines
   missing from this extract alongside the total* ones shown. */
573 if (testResult == TEST_RESULT_PASSED) {
575 totalTestPassedCount++;
576 } else if (testResult == TEST_RESULT_SKIPPED) {
578 totalTestSkippedCount++;
581 totalTestFailedCount++;
585 /* Take time - test end */
586 testEndSeconds = GetClock();
587 runtime = testEndSeconds - testStartSeconds;
/* Clamp negative deltas (clock() can wrap on long runs). */
588 if (runtime < 0.0f) runtime = 0.0f;
590 if (testIterations > 1) {
591 /* Log test runtime */
592 SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
593 SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
595 /* Log test runtime */
596 SDLTest_Log("Total Test runtime: %.1f sec", runtime);
599 /* Log final test result */
600 switch (testResult) {
601 case TEST_RESULT_PASSED:
602 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Passed");
604 case TEST_RESULT_FAILED:
605 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Failed");
607 case TEST_RESULT_NO_ASSERT:
608 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT,"Test", currentTestName, "No Asserts");
612 /* Collect failed test case references for repro-step display */
/* failedTests was sized for totalNumberOfTests, so this never overruns. */
613 if (testResult == TEST_RESULT_FAILED) {
614 failedTests[failedNumberOfTests] = testCase;
615 failedNumberOfTests++;
620 /* Take time - suite end */
621 suiteEndSeconds = GetClock();
622 runtime = suiteEndSeconds - suiteStartSeconds;
623 if (runtime < 0.0f) runtime = 0.0f;
625 /* Log suite runtime */
626 SDLTest_Log("Total Suite runtime: %.1f sec", runtime);
628 /* Log summary and final Suite result */
629 countSum = testPassedCount + testFailedCount + testSkippedCount;
630 if (testFailedCount == 0)
632 SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
633 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Passed");
637 SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
638 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Failed");
644 /* Take time - run end */
645 runEndSeconds = GetClock();
646 runtime = runEndSeconds - runStartSeconds;
647 if (runtime < 0.0f) runtime = 0.0f;
649 /* Log total runtime */
650 SDLTest_Log("Total Run runtime: %.1f sec", runtime);
652 /* Log summary and final run result */
653 countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
654 if (totalTestFailedCount == 0)
657 SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
658 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Passed");
663 SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
664 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Failed");
667 /* Print repro steps for failed tests */
668 if (failedNumberOfTests > 0) {
669 SDLTest_Log("Harness input to repro failures:");
670 for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
671 SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
674 SDL_free((void *) failedTests);
676 SDLTest_Log("Exit code: %d", runResult);
680 /* vi: set ts=4 sw=4 expandtab: */