2 Simple DirectMedia Layer
3 Copyright (C) 1997-2018 Sam Lantinga <slouken@libsdl.org>
5 This software is provided 'as-is', without any express or implied
6 warranty. In no event will the authors be held liable for any damages
7 arising from the use of this software.
9 Permission is granted to anyone to use this software for any purpose,
10 including commercial applications, and to alter it and redistribute it
11 freely, subject to the following restrictions:
13 1. The origin of this software must not be misrepresented; you must not
14 claim that you wrote the original software. If you use this software
15 in a product, an acknowledgment in the product documentation would be
16 appreciated but is not required.
17 2. Altered source versions must be plainly marked as such, and must not be
18 misrepresented as being the original software.
19 3. This notice may not be removed or altered from any source distribution.
22 #include "SDL_config.h"
30 /* Invalid test name/description message format */
31 #define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"
33 /* Log summary message format */
34 #define SDLTEST_LOG_SUMMARY_FORMAT "%s Summary: Total=%d Passed=%d Failed=%d Skipped=%d Unsupported=%d"
36 /* Final result message format */
37 #define SDLTEST_FINAL_RESULT_FORMAT ">>> %s '%s': %s\n"
39 /* ! \brief Timeout for single test case execution */
/* NOTE(review): value is presumably in seconds (3600 = 1 hour), matching the
   seconds-based 'timeout' parameter of SDLTest_SetTestTimeout below -- confirm.
   Currently only referenced from the commented-out watchdog setup in
   SDLTest_RunTest. */
40 static Uint32 SDLTest_TestCaseTimeout = 3600;
43 * Generates a random run seed string for the harness. The generated seed
44 * will contain alphanumeric characters (0-9A-Z).
46 * Note: The returned string needs to be deallocated by the caller.
48 * \param length The length of the seed string to generate
50 * \returns The generated seed string
53 SDLTest_GenerateRunSeed(const int length)
56 SDLTest_RandomContext randomContext;
59 /* Sanity check input */
/* NOTE(review): the 'length <= 0' condition and the early return following
   this log call are not visible in this view -- confirm against full file. */
61 SDLTest_LogError("The length of the harness seed must be >0.");
65 /* Allocate output buffer */
/* +1 for the terminating NUL character. */
66 seed = (char *)SDL_malloc((length + 1) * sizeof(char));
/* NOTE(review): the 'seed == NULL' check guarding this error path is elided
   from this view. */
68 SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
69 SDL_Error(SDL_ENOMEM);
73 /* Generate a random string of alphanumeric characters */
/* Time-seeded: each harness run gets a different seed string unless the
   caller supplied one explicitly. */
74 SDLTest_RandomInitTime(&randomContext);
75 for (counter = 0; counter < length; counter++) {
76 unsigned int number = SDLTest_Random(&randomContext);
/* Map the random value onto ASCII codes 48..90 ('0'..'Z'). */
77 char ch = (char) (number % (91 - 48)) + 48;
/* ASCII 58..64 are the punctuation characters ':'..'@' that sit between '9'
   and 'A'; presumably remapped here to keep the seed alphanumeric, but the
   remapping statement -- and the rest of this function, including the
   NUL-termination and the return -- is not visible in this view. */
78 if (ch >= 58 && ch <= 64) {
89 * Generates an execution key for the fuzzer.
91 * \param runSeed The run seed to use
92 * \param suiteName The name of the test suite
93 * \param testName The name of the test
94 * \param iteration The iteration count
96 * \returns The generated execution key to initialize the fuzzer with.
100 SDLTest_GenerateExecKey(const char *runSeed, char *suiteName, char *testName, int iteration)
102 SDLTest_Md5Context md5Context;
104 char iterationString[16];
105 size_t runSeedLength;
106 size_t suiteNameLength;
107 size_t testNameLength;
108 size_t iterationStringLength;
109 size_t entireStringLength;
/* Validate all inputs; note the error-path return statements after each of
   these log calls are not visible in this view. */
112 if (runSeed == NULL || runSeed[0] == '\0') {
113 SDLTest_LogError("Invalid runSeed string.");
117 if (suiteName == NULL || suiteName[0] == '\0') {
118 SDLTest_LogError("Invalid suiteName string.");
122 if (testName == NULL || testName[0] == '\0') {
123 SDLTest_LogError("Invalid testName string.");
/* Iterations are 1-based here: 0 and negative counts are rejected. */
127 if (iteration <= 0) {
128 SDLTest_LogError("Invalid iteration count.");
132 /* Convert iteration number into a string */
133 SDL_memset(iterationString, 0, sizeof(iterationString));
134 SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);
136 /* Combine the parameters into single string */
137 runSeedLength = SDL_strlen(runSeed);
138 suiteNameLength = SDL_strlen(suiteName);
139 testNameLength = SDL_strlen(testName);
140 iterationStringLength = SDL_strlen(iterationString);
/* +1 for the terminating NUL of the concatenated string. */
141 entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
142 buffer = (char *)SDL_malloc(entireStringLength);
143 if (buffer == NULL) {
144 SDLTest_LogError("Failed to allocate buffer for execKey generation.");
145 SDL_Error(SDL_ENOMEM);
148 SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);
150 /* Hash string and use half of the digest as 64bit exec key */
151 SDLTest_Md5Init(&md5Context);
/* NOTE(review): entireStringLength counts the terminating NUL, so the hash
   covers strlen(buffer)+1 bytes (the NUL is hashed too). Deterministic, but
   confirm this off-by-one is intended -- changing it would change every key. */
152 SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, (unsigned int) entireStringLength);
153 SDLTest_Md5Final(&md5Context);
/* Reinterpret the first 8 digest bytes as a Uint64 key.
   NOTE(review): pointer-cast type punning of the digest buffer -- relies on
   suitable alignment and aliasing leniency; the SDL_free(buffer) and the
   return of keys[0] are not visible in this view. */
155 keys = (Uint64 *)md5Context.digest;
161 * \brief Set timeout handler for test.
163 * Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
165 * \param timeout Timeout interval in seconds.
166 * \param callback Function that will be called after timeout has elapsed.
168 * \return Timer id or -1 on failure.
171 SDLTest_SetTestTimeout(int timeout, void (*callback)())
173 Uint32 timeoutInMilliseconds;
/* Parameter validation; the early 'return -1' after each of these log calls
   is not visible in this view. */
176 if (callback == NULL) {
177 SDLTest_LogError("Timeout callback can't be NULL");
/* NOTE(review): the 'timeout <= 0' condition guarding this message is elided
   from this view. */
182 SDLTest_LogError("Timeout value must be bigger than zero.");
186 /* Init SDL timer if not initialized before */
187 if (SDL_WasInit(SDL_INIT_TIMER) == 0) {
188 if (SDL_InitSubSystem(SDL_INIT_TIMER)) {
189 SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
/* Seconds -> milliseconds for SDL_AddTimer. NOTE(review): 'timeout * 1000'
   is computed in int arithmetic and could overflow for very large timeouts. */
195 timeoutInMilliseconds = timeout * 1000;
/* NOTE(review): casting a 'void (*)()' to SDL_TimerCallback
   (Uint32 (*)(Uint32, void *)) and invoking it through the timer is a
   mismatched-signature call -- it works on common ABIs but is undefined
   behavior per the C standard; worth flagging upstream. */
196 timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
/* Timer-creation failure branch; the 'timerID == 0' check and the final
   'return timerID' are not visible in this view. */
198 SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
206 * \brief Timeout handler. Aborts test run and exits harness process.
/* NOTE(review): the line carrying this function's name is not visible in this
   view (presumably the SDLTest_BailOut referenced by the commented-out
   SDLTest_SetTestTimeout call in SDLTest_RunTest -- confirm). Marked
   SDL_NORETURN because exit() never returns. */
208 static SDL_NORETURN void
211 SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
212 exit(TEST_ABORTED); /* bail out from the test */
216 * \brief Execute a test using the given execution key.
218 * \param testSuite Suite containing the test case.
219 * \param testCase Case to execute.
220 * \param execKey Execution key for the fuzzer.
221 * \param forceTestRun Force test to run even if test was disabled in suite.
223 * \returns Test case result.
226 SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseReference *testCase, Uint64 execKey, SDL_bool forceTestRun)
228 SDL_TimerID timer = 0;
229 int testCaseResult = 0;
/* Guard against NULL suite/case references before touching any members. */
233 if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
235 SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
236 return TEST_RESULT_SETUP_FAILURE;
/* Honor the per-case 'enabled' flag unless the caller forces execution
   (used when a test filter explicitly selects a disabled test). */
239 if (!testCase->enabled && forceTestRun == SDL_FALSE)
241 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
242 return TEST_RESULT_SKIPPED;
245 /* Initialize fuzzer */
246 SDLTest_FuzzerInit(execKey);
248 /* Reset assert tracker */
249 SDLTest_ResetAssertSummary();
251 /* Set timeout timer */
/* NOTE(review): the watchdog timer is disabled (commented out), yet
   SDL_RemoveTimer(timer) below still runs with timer == 0. Removing an
   invalid timer id is harmless, but this dead path should either be
   re-enabled or deleted. */
252 //timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
254 /* Maybe run suite initializer function */
255 if (testSuite->testSetUp) {
256 testSuite->testSetUp(0x0);
257 if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
258 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, "Failed");
/* NOTE(review): returning here skips testTearDown; presumably intentional
   since setup failed, but confirm teardown has no mandatory cleanup. */
259 return TEST_RESULT_SETUP_FAILURE;
263 /* Run test case function */
264 testCaseResult = testCase->testCase(0x0);
266 /* Convert test execution result into harness result */
267 if (testCaseResult == TEST_SKIPPED) {
268 /* Test was programmatically skipped */
269 testResult = TEST_RESULT_SKIPPED;
270 } else if (testCaseResult == TEST_STARTED) {
271 /* Test did not return a TEST_COMPLETED value; assume it failed */
272 testResult = TEST_RESULT_FAILED;
273 } else if (testCaseResult == TEST_ABORTED) {
274 /* Test was aborted early; assume it failed */
275 testResult = TEST_RESULT_FAILED;
276 } else if (testCaseResult == TEST_UNSUPPORTED) {
277 /* Test was unsupported */
278 testResult = TEST_RESULT_UNSUPPORTED;
/* (The opening of the trailing else branch is elided from this view.)
   For a completed test, derive pass/fail from the assert counters. */
280 /* Perform failure analysis based on asserts */
281 testResult = SDLTest_AssertSummaryToTestResult();
284 /* Maybe run suite cleanup function (ignore failed asserts) */
285 if (testSuite->testTearDown) {
286 testSuite->testTearDown(0x0);
289 /* Cancel timeout timer */
291 SDL_RemoveTimer(timer);
294 /* Report on asserts and fuzzer usage */
295 fuzzerCount = SDLTest_GetFuzzerInvocationCount();
296 if (fuzzerCount > 0) {
297 SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
300 /* Final log based on test execution result */
301 if (testCaseResult == TEST_SKIPPED) {
302 /* Test was programmatically skipped */
303 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Programmatically)");
304 } else if (testCaseResult == TEST_STARTED) {
305 /* Test did not return a TEST_COMPLETED value; assume it failed */
306 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (test started, but did not return TEST_COMPLETED)");
307 } else if (testCaseResult == TEST_ABORTED) {
308 /* Test was aborted early; assume it failed */
309 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Failed (Aborted)");
310 } else if (testCaseResult == TEST_UNSUPPORTED) {
311 /* Test was unsupported */
312 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Unsupported");
/* NOTE(review): the final else branch around this assert-summary log and the
   concluding 'return testResult' are partially elided from this view. */
314 SDLTest_LogAssertSummary();
320 /* Prints summary of all suites/tests contained in the given reference */
322 static void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
326 SDLTest_TestSuiteReference *testSuite;
327 SDLTest_TestCaseReference *testCase;
329 /* Loop over all suites */
/* NOTE(review): '&testSuites[suiteCounter]' is the address of an array
   element and can never be NULL, so this condition is always true. The loop
   must terminate via something in the elided lines (a member check or break)
   -- confirm against the full file; as written this looks like a bug. */
331 while(&testSuites[suiteCounter]) {
332 testSuite=&testSuites[suiteCounter];
/* Log suite index and name, substituting "(Invalid)" for a missing name. */
334 SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
335 (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
337 /* Loop over all test cases */
/* testCases is treated as a NULL-terminated array of case references
   (same convention as the counting loops in SDLTest_RunSuites). */
339 while(testSuite->testCases[testCounter])
341 testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
343 SDLTest_Log(" Test Case %i - %s: %s", testCounter,
344 (testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT,
345 (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
351 /* Gets a timer value in seconds */
/* NOTE(review): clock() measures CPU time, not wall-clock time, so the
   "runtime" figures derived from this are processor seconds -- confirm that
   is intended for duration reporting. The 'return currentClock;' and the
   closing brace are elided from this view. */
352 static float GetClock()
354 float currentClock = clock() / (float) CLOCKS_PER_SEC;
/* Formats a printf-style message and appends it to the given rwops log file,
   decorated with an " INFO: " prefix and a trailing newline. */
358 void SDLTest_writeLogFile(SDL_RWops *rwops, SDL_PRINTF_FORMAT_STRING const char *fmt, ...)
361 char logMessage[SDLTEST_MAX_LOGMESSAGE_LENGTH];
363 /* Print log message into a buffer */
364 SDL_memset(logMessage, 0, SDLTEST_MAX_LOGMESSAGE_LENGTH);
/* NOTE(review): 'list' is the va_list; its declaration and the matching
   va_start/va_end calls are elided from this view. */
366 SDL_vsnprintf(logMessage, SDLTEST_MAX_LOGMESSAGE_LENGTH - 1, fmt, list);
/* Copy into a stack-allocated scratch buffer before decorating; the NULL
   checks following each SDL_stack_alloc are not visible here. */
369 char *message = SDL_stack_alloc(char, SDLTEST_MAX_LOGMESSAGE_LENGTH);
373 size_t len = SDL_strlen(logMessage);
374 SDL_strlcpy(message, logMessage,len+1);
376 char *text = SDL_stack_alloc(char, SDLTEST_MAX_LOGMESSAGE_LENGTH);
/* NOTE(review): prefix + message + newline can exceed the buffer; SDL_snprintf
   truncates safely, but very long messages lose their tail. */
379 SDL_snprintf(text, SDLTEST_MAX_LOGMESSAGE_LENGTH, " INFO: %s\n", message);
380 SDL_RWwrite(rwops, text, 1, SDL_strlen(text));
381 SDL_stack_free(text);
383 SDL_stack_free(message);
387 * \brief Execute a test suite using the given run seed and execution key.
389 * The filter string is matched to the suite name (full comparison) to select a single suite,
390 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
392 * \param testSuites Suites containing the test case.
393 * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
394 * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
395 * \param filter Filter specification. NULL disables. Case sensitive.
396 * \param testIterations Number of iterations to run each test case.
398 * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
400 int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
402 int totalNumberOfTests = 0;
403 int failedNumberOfTests = 0;
/* NOTE(review): several locals used below (suiteCounter, testCounter,
   suiteFilter, testFilter, testResult, runResult, execKey, countSum,
   runtime, runEndSeconds) are declared on lines elided from this view. */
406 int iterationCounter;
407 SDLTest_TestSuiteReference *testSuite;
408 const SDLTest_TestCaseReference *testCase;
409 const char *runSeed = NULL;
410 char *currentSuiteName;
411 char *currentTestName;
413 float runStartSeconds;
414 float suiteStartSeconds;
415 float testStartSeconds;
417 float suiteEndSeconds;
418 float testEndSeconds;
421 char *suiteFilterName = NULL;
423 char *testFilterName = NULL;
424 SDL_bool forceTestRun = SDL_FALSE;
427 Uint32 totalTestFailedCount = 0;
428 Uint32 totalTestPassedCount = 0;
429 Uint32 totalTestSkippedCount = 0;
430 Uint32 totalTestUnsupportedCount = 0;
431 Uint32 testFailedCount = 0;
432 Uint32 testPassedCount = 0;
433 Uint32 testSkippedCount = 0;
434 Uint32 testUnsupportedCount = 0;
436 const SDLTest_TestCaseReference **failedTests;
438 /* Sanitize test iterations */
/* Clamp to at least one iteration (the assignment is elided here). */
439 if (testIterations < 1) {
443 /* Generate run seed if we don't have one already */
/* NOTE(review): when autogenerated, runSeed is heap-allocated by
   SDLTest_GenerateRunSeed; the matching SDL_free is not visible in this
   view -- confirm ownership is handled before return. */
444 if (userRunSeed == NULL || userRunSeed[0] == '\0') {
445 runSeed = SDLTest_GenerateRunSeed(16);
446 if (runSeed == NULL) {
447 SDLTest_LogError("Generating a random seed failed");
451 runSeed = userRunSeed;
455 /* Reset per-run counters */
456 totalTestFailedCount = 0;
457 totalTestPassedCount = 0;
458 totalTestSkippedCount = 0;
459 totalTestUnsupportedCount = 0;
461 /* Take time - run start */
462 runStartSeconds = GetClock();
464 /* Log run with fuzzer parameters */
465 SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);
467 /* Count the total number of tests */
/* Both arrays are traversed as NULL-terminated lists; this count bounds the
   failedTests allocation below. */
469 while (testSuites[suiteCounter]) {
470 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
473 while (testSuite->testCases[testCounter])
476 totalNumberOfTests++;
480 /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
481 failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
482 if (failedTests == NULL) {
483 SDLTest_LogError("Unable to allocate cache for failed tests");
484 SDL_Error(SDL_ENOMEM);
488 /* Initialize filtering */
489 if (filter != NULL && filter[0] != '\0') {
490 /* Loop over all suites to check if we have a filter match */
/* Suite names take precedence: only if no suite matches is the filter
   compared against individual test names. */
492 while (testSuites[suiteCounter] && suiteFilter == 0) {
493 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
495 if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
496 /* Matched a suite name */
498 suiteFilterName = testSuite->name;
499 SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
503 /* Within each suite, loop over all test cases to check if we have a filter match */
505 while (testSuite->testCases[testCounter] && testFilter == 0)
507 testCase = testSuite->testCases[testCounter];
509 if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
510 /* Matched a test name */
/* A test match also pins the enclosing suite so the run loop below can
   skip every other suite. */
512 suiteFilterName = testSuite->name;
514 testFilterName = testCase->name;
515 SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
/* No match at all: report, free the tracking array, and bail out (the
   'return 2' implied by the logged exit code is elided from this view). */
521 if (suiteFilter == 0 && testFilter == 0) {
522 SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
523 SDLTest_Log("Exit code: 2");
524 SDL_free((void *) failedTests);
/* NOTE(review): opened in append mode for the persistent summary log; a
   matching SDL_RWclose is not visible in this view -- confirm the handle is
   closed (and NULL-checked) before the function returns. */
532 SDL_RWops *rwops = SDL_RWFromFile("SDL_Log_Summary.txt", "a+");
534 /* Loop over all suites */
536 while(testSuites[suiteCounter]) {
537 testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
538 currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
541 /* Filter suite if flag set and we have a name */
542 if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
543 SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
/* Suite does not match the filter: log the skip and continue with the next
   suite (the 'continue'/counter advance is elided from this view). */
545 SDLTest_Log("===== Test Suite %i: '%s' skipped\n", suiteCounter, currentSuiteName);
546 SDLTest_writeLogFile(rwops, "===== Test Suite %i: '%s' skipped\n", suiteCounter, currentSuiteName);
549 /* Reset per-suite counters */
552 testSkippedCount = 0;
553 testUnsupportedCount = 0;
555 /* Take time - suite start */
556 suiteStartSeconds = GetClock();
558 /* Log suite started */
559 SDLTest_Log("===== Test Suite %i: '%s' started\n", suiteCounter, currentSuiteName);
560 SDLTest_writeLogFile(rwops, "===== Test Suite %i: '%s' started\n", suiteCounter, currentSuiteName);
562 /* Loop over all test cases */
564 while(testSuite->testCases[testCounter])
566 testCase = testSuite->testCases[testCounter];
567 currentTestName = (char *)((testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
570 /* Filter tests if flag set and we have a name */
571 if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
572 SDL_strcmp(testFilterName, testCase->name) != 0) {
574 SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
579 /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
580 if (testFilter == 1 && !testCase->enabled) {
581 SDLTest_Log("Force run of disabled test since test filter was set");
582 forceTestRun = SDL_TRUE;
585 /* Take time - test start */
586 testStartSeconds = GetClock();
588 /* Log test started */
589 SDLTest_Log("----- Test Case %i.%i: '%s' started",
593 if (testCase->description != NULL && testCase->description[0] != '\0') {
594 SDLTest_Log("Test Description: '%s'",
595 (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
598 /* Loop over all iterations */
599 iterationCounter = 0;
/* NOTE(review): the loop-body opening and the iterationCounter increment are
   elided from this view. SDLTest_GenerateExecKey rejects iteration <= 0, so
   presumably the counter is incremented BEFORE key generation -- confirm
   against the full file. */
600 while(iterationCounter < testIterations)
/* A user-supplied exec key overrides per-iteration key generation. */
604 if (userExecKey != 0) {
605 execKey = userExecKey;
607 execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
610 SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
611 testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);
/* Tally per-suite and per-run counters for every iteration's result
   (some increment lines are elided from this view). */
613 if (testResult == TEST_RESULT_PASSED) {
615 totalTestPassedCount++;
616 } else if (testResult == TEST_RESULT_SKIPPED) {
618 totalTestSkippedCount++;
619 } else if (testResult == TEST_RESULT_UNSUPPORTED) {
620 testUnsupportedCount++;
621 totalTestUnsupportedCount++;
624 totalTestFailedCount++;
628 /* Take time - test end */
629 testEndSeconds = GetClock();
630 runtime = testEndSeconds - testStartSeconds;
631 if (runtime < 0.0f) runtime = 0.0f;
633 if (testIterations > 1) {
634 /* Log test runtime */
635 SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
636 SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
638 /* Log test runtime */
639 SDLTest_Log("Total Test runtime: %.1f sec", runtime);
642 /* Log final test result */
/* Only the last iteration's result is reported here; 'break' statements
   between the cases are elided from this view. */
643 switch (testResult) {
644 case TEST_RESULT_PASSED:
645 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Passed");
646 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Passed");
648 case TEST_RESULT_FAILED:
649 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Failed");
650 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Failed");
652 case TEST_RESULT_NO_ASSERT:
653 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT,"Test", currentTestName, "No Asserts");
654 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "No Asserts");
658 /* Collect failed test case references for repro-step display */
/* In-bounds by construction: failedNumberOfTests can never exceed the
   totalNumberOfTests used to size failedTests above. */
659 if (testResult == TEST_RESULT_FAILED) {
660 failedTests[failedNumberOfTests] = testCase;
661 failedNumberOfTests++;
666 /* Take time - suite end */
667 suiteEndSeconds = GetClock();
668 runtime = suiteEndSeconds - suiteStartSeconds;
669 if (runtime < 0.0f) runtime = 0.0f;
671 /* Log suite runtime */
672 SDLTest_Log("Total Suite runtime: %.1f sec", runtime);
673 SDLTest_writeLogFile(rwops, "Total Suite runtime: %.1f sec", runtime);
675 /* Log summary and final Suite result */
676 countSum = testPassedCount + testFailedCount + testSkippedCount + testUnsupportedCount;
677 if (testFailedCount == 0)
679 SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount, testUnsupportedCount);
680 SDLTest_writeLogFile(rwops, SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount, testUnsupportedCount);
681 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Passed");
682 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Passed");
686 SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount, testUnsupportedCount);
687 SDLTest_writeLogFile(rwops, SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount, testUnsupportedCount);
688 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Failed");
689 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Failed");
695 /* Take time - run end */
696 runEndSeconds = GetClock();
697 runtime = runEndSeconds - runStartSeconds;
698 if (runtime < 0.0f) runtime = 0.0f;
700 /* Log total runtime */
701 SDLTest_Log("Total Run runtime: %.1f sec", runtime);
703 /* Log summary and final run result */
704 countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount + totalTestUnsupportedCount;
705 if (totalTestFailedCount == 0)
708 SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount, totalTestUnsupportedCount);
709 SDLTest_writeLogFile(rwops, SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount, totalTestUnsupportedCount);
710 SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Passed");
711 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Passed");
717 SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount, totalTestUnsupportedCount);
718 SDLTest_writeLogFile(rwops, SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount, totalTestUnsupportedCount);
719 SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Failed");
720 SDLTest_writeLogFile(rwops, SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Failed");
723 /* Print repro steps for failed tests */
724 if (failedNumberOfTests > 0) {
725 SDLTest_Log("Harness input to repro failures:");
726 for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
727 SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
731 SDL_free((void *) failedTests);
/* NOTE(review): the final 'return runResult;' is elided from this view. */
733 SDLTest_Log("Exit code: %d", runResult);
737 /* vi: set ts=4 sw=4 expandtab: */