/*
  Simple DirectMedia Layer
  Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>

  This software is provided 'as-is', without any express or implied
  warranty.  In no event will the authors be held liable for any damages
  arising from the use of this software.

  Permission is granted to anyone to use this software for any purpose,
  including commercial applications, and to alter it and redistribute it
  freely, subject to the following restrictions:

  1. The origin of this software must not be misrepresented; you must not
     claim that you wrote the original software. If you use this software
     in a product, an acknowledgment in the product documentation would be
     appreciated but is not required.
  2. Altered source versions must be plainly marked as such, and must not be
     misrepresented as being the original software.
  3. This notice may not be removed or altered from any source distribution.
*/
#include <SDL3/SDL_test.h>

#include <stdlib.h> /* Needed for exit() */

/* Enable to have color in logs */
#if 1
#define COLOR_RED       "\033[0;31m"
#define COLOR_GREEN     "\033[0;32m"
#define COLOR_YELLOW    "\033[0;93m"
#define COLOR_BLUE      "\033[0;94m"
#define COLOR_END       "\033[0m"
#else
#define COLOR_RED       ""
#define COLOR_GREEN     ""
#define COLOR_BLUE      ""
#define COLOR_YELLOW    ""
#define COLOR_END       ""
#endif

/* Invalid test name/description message format */
#define SDLTEST_INVALID_NAME_FORMAT "(Invalid)"

/* Log summary message format */
#define SDLTEST_LOG_SUMMARY_FORMAT     "%s Summary: Total=%d " COLOR_GREEN "Passed=%d" COLOR_END " " COLOR_RED "Failed=%d" COLOR_END " " COLOR_BLUE "Skipped=%d" COLOR_END
#define SDLTEST_LOG_SUMMARY_FORMAT_OK  "%s Summary: Total=%d " COLOR_GREEN "Passed=%d" COLOR_END " " COLOR_GREEN "Failed=%d" COLOR_END " " COLOR_BLUE "Skipped=%d" COLOR_END

/* Final result message format */
#define SDLTEST_FINAL_RESULT_FORMAT COLOR_YELLOW ">>> %s '%s':" COLOR_END " %s\n"

/* ! Timeout for single test case execution */
static Uint32 SDLTest_TestCaseTimeout = 3600;

/**
 * Generates a random run seed string for the harness. The generated seed
 * will contain alphanumeric characters (0-9A-Z).
 *
 * Note: The returned string needs to be deallocated by the caller.
 *
 * \param length The length of the seed string to generate
 *
 * \returns The generated seed string
 */
char *SDLTest_GenerateRunSeed(const int length)
{
    char *seed = NULL;
    Uint64 randomContext = SDL_GetPerformanceCounter();
    int counter;

    /* Sanity check input */
    if (length <= 0) {
        SDLTest_LogError("The length of the harness seed must be >0.");
        return NULL;
    }

    /* Allocate output buffer */
    seed = (char *)SDL_malloc((length + 1) * sizeof(char));
    if (!seed) {
        SDLTest_LogError("SDL_malloc for run seed output buffer failed.");
        return NULL;
    }

    /* Generate a random string of alphanumeric characters */
    for (counter = 0; counter < length; counter++) {
        char ch;
        int v = SDL_rand_r(&randomContext, 10 + 26);
        if (v < 10) {
            ch = (char)('0' + v);
        } else {
            ch = (char)('A' + v - 10);
        }
        seed[counter] = ch;
    }
    seed[length] = '\0';

    return seed;
}
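
/*
 * Editor's note: an illustrative sketch (not part of the original file) of how
 * a caller might use SDLTest_GenerateRunSeed(). The returned buffer is owned
 * by the caller and must be released with SDL_free():
 *
 *     char *seed = SDLTest_GenerateRunSeed(16);
 *     if (seed) {
 *         SDLTest_Log("Using generated run seed '%s'", seed);
 *         SDL_free(seed);
 *     }
 */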

/**
 * Generates an execution key for the fuzzer.
 *
 * \param runSeed        The run seed to use
 * \param suiteName      The name of the test suite
 * \param testName       The name of the test
 * \param iteration      The iteration count
 *
 * \returns The generated execution key to initialize the fuzzer with.
 *
 */
static Uint64 SDLTest_GenerateExecKey(const char *runSeed, const char *suiteName, const char *testName, int iteration)
{
    SDLTest_Md5Context md5Context;
    Uint64 *keys;
    char iterationString[16];
    size_t runSeedLength;
    size_t suiteNameLength;
    size_t testNameLength;
    size_t iterationStringLength;
    size_t entireStringLength;
    char *buffer;

    if (!runSeed || runSeed[0] == '\0') {
        SDLTest_LogError("Invalid runSeed string.");
        return 0;
    }

    if (!suiteName || suiteName[0] == '\0') {
        SDLTest_LogError("Invalid suiteName string.");
        return 0;
    }

    if (!testName || testName[0] == '\0') {
        SDLTest_LogError("Invalid testName string.");
        return 0;
    }

    if (iteration <= 0) {
        SDLTest_LogError("Invalid iteration count.");
        return 0;
    }

    /* Convert iteration number into a string */
    SDL_memset(iterationString, 0, sizeof(iterationString));
    (void)SDL_snprintf(iterationString, sizeof(iterationString) - 1, "%d", iteration);

    /* Combine the parameters into single string */
    runSeedLength = SDL_strlen(runSeed);
    suiteNameLength = SDL_strlen(suiteName);
    testNameLength = SDL_strlen(testName);
    iterationStringLength = SDL_strlen(iterationString);
    entireStringLength = runSeedLength + suiteNameLength + testNameLength + iterationStringLength + 1;
    buffer = (char *)SDL_malloc(entireStringLength);
    if (!buffer) {
        SDLTest_LogError("Failed to allocate buffer for execKey generation.");
        return 0;
    }
    (void)SDL_snprintf(buffer, entireStringLength, "%s%s%s%d", runSeed, suiteName, testName, iteration);

    /* Hash string and use half of the digest as 64bit exec key */
    SDLTest_Md5Init(&md5Context);
    SDLTest_Md5Update(&md5Context, (unsigned char *)buffer, (unsigned int)entireStringLength);
    SDLTest_Md5Final(&md5Context);
    SDL_free(buffer);
    keys = (Uint64 *)md5Context.digest;

    return keys[0];
}
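
/*
 * Editor's note: illustrative sketch (not part of the original file) of how an
 * execution key is derived and consumed. The key is the first 64 bits of the
 * MD5 digest of runSeed + suiteName + testName + iteration, so identical
 * inputs always reproduce the same fuzzer state. The argument values below are
 * hypothetical:
 *
 *     Uint64 execKey = SDLTest_GenerateExecKey("ABCDEF1234567890", "Suite", "testCase", 1);
 *     SDLTest_FuzzerInit(execKey);
 */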

/**
 * Set timeout handler for test.
 *
 * Note: SDL_Init(SDL_INIT_TIMER) will be called if it wasn't done so before.
 *
 * \param timeout Timeout interval in seconds.
 * \param callback Function that will be called after timeout has elapsed.
 *
 * \return Timer id or 0 on failure.
 */
static SDL_TimerID SDLTest_SetTestTimeout(int timeout, void(SDLCALL *callback)(void))
{
    Uint32 timeoutInMilliseconds;
    SDL_TimerID timerID;

    if (!callback) {
        SDLTest_LogError("Timeout callback can't be NULL");
        return 0;
    }

    if (timeout < 0) {
        SDLTest_LogError("Timeout value must not be negative.");
        return 0;
    }

    /* Init SDL timer if not initialized before */
    if (!SDL_WasInit(SDL_INIT_TIMER)) {
        if (!SDL_InitSubSystem(SDL_INIT_TIMER)) {
            SDLTest_LogError("Failed to init timer subsystem: %s", SDL_GetError());
            return 0;
        }
    }

    /* Set timer */
    timeoutInMilliseconds = timeout * 1000;
    timerID = SDL_AddTimer(timeoutInMilliseconds, (SDL_TimerCallback)callback, 0x0);
    if (timerID == 0) {
        SDLTest_LogError("Creation of SDL timer failed: %s", SDL_GetError());
        return 0;
    }

    return timerID;
}
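
/*
 * Editor's note: illustrative sketch (not part of the original file) of how the
 * harness arms and disarms the per-test timeout; SDLTest_BailOut below is the
 * callback actually used by SDLTest_RunTest():
 *
 *     SDL_TimerID timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);
 *     ... run the test case ...
 *     if (timer) {
 *         SDL_RemoveTimer(timer);
 *     }
 */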

/**
 * Timeout handler. Aborts test run and exits harness process.
 */
#ifdef __WATCOMC__
#pragma aux SDLTest_BailOut aborts;
#endif
static SDL_NORETURN void SDLCALL SDLTest_BailOut(void)
{
    SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
    exit(TEST_ABORTED); /* bail out from the test */
}

/**
 * Execute a test using the given execution key.
 *
 * \param testSuite Suite containing the test case.
 * \param testCase Case to execute.
 * \param execKey Execution key for the fuzzer.
 * \param forceTestRun Force test to run even if test was disabled in suite.
 *
 * \returns Test case result.
 */
static int SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, const SDLTest_TestCaseReference *testCase, Uint64 execKey, SDL_bool forceTestRun)
{
    SDL_TimerID timer = 0;
    int testCaseResult = 0;
    int testResult = 0;
    int fuzzerCount;

    if (!testSuite || !testCase || !testSuite->name || !testCase->name) {
        SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
        return TEST_RESULT_SETUP_FAILURE;
    }

    if (!testCase->enabled && forceTestRun == SDL_FALSE) {
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, "Skipped (Disabled)");
        return TEST_RESULT_SKIPPED;
    }

    /* Initialize fuzzer */
    SDLTest_FuzzerInit(execKey);

    /* Reset assert tracker */
    SDLTest_ResetAssertSummary();

    /* Set timeout timer */
    timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);

    /* Maybe run suite initializer function */
    if (testSuite->testSetUp) {
        testSuite->testSetUp(0x0);
        if (SDLTest_AssertSummaryToTestResult() == TEST_RESULT_FAILED) {
            SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite Setup", testSuite->name, COLOR_RED "Failed" COLOR_END);
            return TEST_RESULT_SETUP_FAILURE;
        }
    }

    /* Run test case function */
    testCaseResult = testCase->testCase(0x0);

    /* Convert test execution result into harness result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        testResult = TEST_RESULT_SKIPPED;
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        testResult = TEST_RESULT_FAILED;
    } else {
        /* Perform failure analysis based on asserts */
        testResult = SDLTest_AssertSummaryToTestResult();
    }

    /* Maybe run suite cleanup function (ignore failed asserts) */
    if (testSuite->testTearDown) {
        testSuite->testTearDown(0x0);
    }

    /* Cancel timeout timer */
    if (timer) {
        SDL_RemoveTimer(timer);
    }

    /* Report on asserts and fuzzer usage */
    fuzzerCount = SDLTest_GetFuzzerInvocationCount();
    if (fuzzerCount > 0) {
        SDLTest_Log("Fuzzer invocations: %d", fuzzerCount);
    }

    /* Final log based on test execution result */
    if (testCaseResult == TEST_SKIPPED) {
        /* Test was programmatically skipped */
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_BLUE "Skipped (Programmatically)" COLOR_END);
    } else if (testCaseResult == TEST_STARTED) {
        /* Test did not return a TEST_COMPLETED value; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_RED "Failed (test started, but did not return TEST_COMPLETED)" COLOR_END);
    } else if (testCaseResult == TEST_ABORTED) {
        /* Test was aborted early; assume it failed */
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", testCase->name, COLOR_RED "Failed (Aborted)" COLOR_END);
    } else {
        SDLTest_LogAssertSummary();
    }

    return testResult;
}
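
/*
 * Editor's note: illustrative sketch (not part of the original file). This is
 * how SDLTest_RunSuites() below drives a single case; forceTestRun is only set
 * when a test filter explicitly selects a disabled test:
 *
 *     Uint64 execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
 *     int testResult = SDLTest_RunTest(testSuite, testCase, execKey, SDL_FALSE);
 */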

/* Prints summary of all suites/tests contained in the given reference */
#if 0
static void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
{
    int suiteCounter;
    int testCounter;
    SDLTest_TestSuiteReference *testSuite;
    SDLTest_TestCaseReference *testCase;

    /* Loop over all suites */
    suiteCounter = 0;
    while (&testSuites[suiteCounter]) {
        testSuite=&testSuites[suiteCounter];
        suiteCounter++;
        SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
            (testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);

        /* Loop over all test cases */
        testCounter = 0;
        while (testSuite->testCases[testCounter]) {
            testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
            testCounter++;
            SDLTest_Log("  Test Case %i - %s: %s", testCounter,
                (testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT,
                (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
        }
    }
}
#endif

/* Gets a timer value in seconds */
static float GetClock(void)
{
    float currentClock = SDL_GetPerformanceCounter() / (float)SDL_GetPerformanceFrequency();
    return currentClock;
}

/**
 * Execute a test suite using the given run seed and execution key.
 *
 * The filter string is matched to the suite name (full comparison) to select a single suite,
 * or if no suite matches, it is matched to the test names (full comparison) to select a single test.
 *
 * \param testSuites Suites containing the test case.
 * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
 * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
 * \param filter Filter specification. NULL disables. Case sensitive.
 * \param testIterations Number of iterations to run each test case.
 * \param randomOrder Allow suites and tests to run in random order when there is no filter.
 *
 * \returns Test run result; 0 when all tests passed, 1 if any tests failed.
 */
int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations, SDL_bool randomOrder)
{
    int totalNumberOfTests = 0;
    int failedNumberOfTests = 0;
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    const SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    const char *currentSuiteName;
    const char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;
    const char *suiteFilterName = NULL;
    int testFilter = 0;
    const char *testFilterName = NULL;
    SDL_bool forceTestRun = SDL_FALSE;
    int testResult = 0;
    int runResult = 0;
    int totalTestFailedCount = 0;
    int totalTestPassedCount = 0;
    int totalTestSkippedCount = 0;
    int testFailedCount = 0;
    int testPassedCount = 0;
    int testSkippedCount = 0;
    int countSum = 0;
    const SDLTest_TestCaseReference **failedTests;
    char generatedSeed[16 + 1];
    int nbSuites = 0;
    int i = 0;
    int *arraySuites = NULL;

    /* Sanitize test iterations */
    if (testIterations < 1) {
        testIterations = 1;
    }

    /* Generate a run seed if we don't have one already */
    if (!userRunSeed || userRunSeed[0] == '\0') {
        char *tmp = SDLTest_GenerateRunSeed(16);
        if (!tmp) {
            SDLTest_LogError("Generating a random seed failed");
            return 2;
        }
        SDL_memcpy(generatedSeed, tmp, 16 + 1);
        SDL_free(tmp);
        runSeed = generatedSeed;
    } else {
        runSeed = userRunSeed;
    }

    /* Reset per-run counters */
    totalTestFailedCount = 0;
    totalTestPassedCount = 0;
    totalTestSkippedCount = 0;

    /* Take time - run start */
    runStartSeconds = GetClock();

    /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);

    /* Count the total number of tests */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite = testSuites[suiteCounter];
        suiteCounter++;
        testCounter = 0;
        while (testSuite->testCases[testCounter]) {
            testCounter++;
            totalNumberOfTests++;
        }
    }

    if (totalNumberOfTests == 0) {
        SDLTest_LogError("No tests to run?");
        return -1;
    }

    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
    failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
    if (!failedTests) {
        SDLTest_LogError("Unable to allocate cache for failed tests");
        return -1;
    }

    /* Initialize filtering */
    if (filter && filter[0] != '\0') {
        /* Loop over all suites to check if we have a filter match */
        suiteCounter = 0;
        while (testSuites[suiteCounter] && suiteFilter == 0) {
            testSuite = testSuites[suiteCounter];
            suiteCounter++;
            if (testSuite->name && SDL_strcasecmp(filter, testSuite->name) == 0) {
                /* Matched a suite name */
                suiteFilter = 1;
                suiteFilterName = testSuite->name;
                SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
                break;
            }

            /* Within each suite, loop over all test cases to check if we have a filter match */
            testCounter = 0;
            while (testSuite->testCases[testCounter] && testFilter == 0) {
                testCase = testSuite->testCases[testCounter];
                testCounter++;
                if (testCase->name && SDL_strcasecmp(filter, testCase->name) == 0) {
                    /* Matched a test name */
                    suiteFilter = 1;
                    suiteFilterName = testSuite->name;
                    testFilter = 1;
                    testFilterName = testCase->name;
                    SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
                    break;
                }
            }
        }

        if (suiteFilter == 0 && testFilter == 0) {
            SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
            for (suiteCounter = 0; testSuites[suiteCounter]; ++suiteCounter) {
                testSuite = testSuites[suiteCounter];
                if (testSuite->name) {
                    SDLTest_Log("Test suite: %s", testSuite->name);
                }

                /* Within each suite, loop over all test cases to check if we have a filter match */
                for (testCounter = 0; testSuite->testCases[testCounter]; ++testCounter) {
                    testCase = testSuite->testCases[testCounter];
                    SDLTest_Log("      test: %s%s", testCase->name, testCase->enabled ? "" : " (disabled)");
                }
            }
            SDLTest_Log("Exit code: 2");
            SDL_free((void *)failedTests);
            return 2;
        }

        randomOrder = SDL_FALSE;
    }

    /* Number of test suites */
    while (testSuites[nbSuites]) {
        nbSuites++;
    }

    arraySuites = SDL_malloc(nbSuites * sizeof(int));
    if (!arraySuites) {
        return SDL_OutOfMemory();
    }
    for (i = 0; i < nbSuites; i++) {
        arraySuites[i] = i;
    }

    /* Mix the list of suites to run them in random order */
    {
        /* Exclude the last suite, "subsystemsTestSuite", which is said to interfere with other tests */
        nbSuites--;

        if (userExecKey != 0) {
            execKey = userExecKey;
        } else {
            /* dummy values to have random numbers working */
            execKey = SDLTest_GenerateExecKey(runSeed, "random testSuites", "initialisation", 1);
        }

        /* Initialize fuzzer */
        SDLTest_FuzzerInit(execKey);

        i = 100;
        while (i--) {
            int a, b;
            int tmp;
            a = SDLTest_RandomIntegerInRange(0, nbSuites - 1);
            b = SDLTest_RandomIntegerInRange(0, nbSuites - 1);
            /*
             * NB: the swap below is conditional so that the same number of
             * SDLTest_RandomIntegerInRange() calls is consumed whether or not
             * randomOrder is set. This keeps the tests starting from the same
             * random state, so a given --seed reproduces the same run with or
             * without randomOrder.
             */
            /* Swap */
            if (randomOrder) {
                tmp = arraySuites[b];
                arraySuites[b] = arraySuites[a];
                arraySuites[a] = tmp;
            }
        }

        /* Re-add the last suite */
        nbSuites++;
    }

    /* Loop over all suites */
    for (i = 0; i < nbSuites; i++) {
        suiteCounter = arraySuites[i];
        testSuite = testSuites[suiteCounter];
        currentSuiteName = (testSuite->name ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
        suiteCounter++;

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName && testSuite->name &&
            SDL_strcasecmp(suiteFilterName, testSuite->name) != 0) {
            /* Skip suite */
            SDLTest_Log("===== Test Suite %i: '%s' " COLOR_BLUE "skipped" COLOR_END "\n",
                        suiteCounter,
                        currentSuiteName);
        } else {

            int nbTestCases = 0;
            int *arrayTestCases;
            int j;
            while (testSuite->testCases[nbTestCases]) {
                nbTestCases++;
            }

            arrayTestCases = SDL_malloc(nbTestCases * sizeof(int));
            if (!arrayTestCases) {
                return SDL_OutOfMemory();
            }
            for (j = 0; j < nbTestCases; j++) {
                arrayTestCases[j] = j;
            }

            /* Mix the list of testCases to run them in random order */
            j = 100;
            while (j--) {
                int a, b;
                int tmp;
                a = SDLTest_RandomIntegerInRange(0, nbTestCases - 1);
                b = SDLTest_RandomIntegerInRange(0, nbTestCases - 1);
                /* Swap */
                /* See previous note */
                if (randomOrder) {
                    tmp = arrayTestCases[b];
                    arrayTestCases[b] = arrayTestCases[a];
                    arrayTestCases[a] = tmp;
                }
            }

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                        suiteCounter,
                        currentSuiteName);

            /* Loop over all test cases */
            for (j = 0; j < nbTestCases; j++) {
                testCounter = arrayTestCases[j];
                testCase = testSuite->testCases[testCounter];
                currentTestName = (testCase->name ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
                testCounter++;

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName && testCase->name &&
                    SDL_strcasecmp(testFilterName, testCase->name) != 0) {
                    /* Skip test */
                    SDLTest_Log("===== Test Case %i.%i: '%s' " COLOR_BLUE "skipped" COLOR_END "\n",
                                suiteCounter,
                                testCounter,
                                currentTestName);
                } else {
                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        forceTestRun = SDL_TRUE;
                    }

                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log(COLOR_YELLOW "----- Test Case %i.%i: '%s' started" COLOR_END,
                                suiteCounter,
                                testCounter,
                                currentTestName);
                    if (testCase->description && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                                    (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
                    }

                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while (iterationCounter < testIterations) {
                        iterationCounter++;

                        if (userExecKey != 0) {
                            execKey = userExecKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }

                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) {
                        runtime = 0.0f;
                    }

                    if (testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }

                    /* Log final test result */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_GREEN "Passed" COLOR_END);
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_RED "Failed" COLOR_END);
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, COLOR_BLUE "No Asserts" COLOR_END);
                        break;
                    }

                    /* Collect failed test case references for repro-step display */
                    if (testResult == TEST_RESULT_FAILED) {
                        failedTests[failedNumberOfTests] = testCase;
                        failedNumberOfTests++;
                    }
                }
            }

            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) {
                runtime = 0.0f;
            }

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0) {
                SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT_OK, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, COLOR_GREEN "Passed" COLOR_END);
            } else {
                SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, COLOR_RED "Failed" COLOR_END);
            }

            SDL_free(arrayTestCases);
        }
    }

    SDL_free(arraySuites);

    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) {
        runtime = 0.0f;
    }

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0) {
        runResult = 0;
        SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT_OK, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, COLOR_GREEN "Passed" COLOR_END);
    } else {
        runResult = 1;
        SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, COLOR_RED "Failed" COLOR_END);
    }

    /* Print repro steps for failed tests */
    if (failedNumberOfTests > 0) {
        SDLTest_Log("Harness input to repro failures:");
        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
            SDLTest_Log(COLOR_RED " --seed %s --filter %s" COLOR_END, runSeed, failedTests[testCounter]->name);
        }
    }
    SDL_free((void *)failedTests);

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}
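
/*
 * Editor's note: illustrative sketch (not part of the original file) of a
 * harness entry point. 'testSuites' stands for a NULL-terminated array of
 * SDLTest_TestSuiteReference pointers, such as the one assembled by the SDL
 * test programs; the other arguments mirror typical defaults (autogenerated
 * seed and exec key, no filter, one iteration, sequential order):
 *
 *     int result = SDLTest_RunSuites(testSuites, NULL, 0, NULL, 1, SDL_FALSE);
 *     exit(result);
 */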