Added to harness in test lib (work in progress)
ferzkopp committed Dec 5, 2012
1 parent 4c67e56 commit d975a7a
Showing 4 changed files with 282 additions and 23 deletions.
6 changes: 6 additions & 0 deletions include/SDL_test_assert.h
@@ -54,6 +54,12 @@ extern "C" {
*/
#define ASSERT_PASS 1


/*! \brief counts the failed asserts */
static Uint32 SDLTest_AssertsFailed = 0;

/*! \brief counts the passed asserts */
static Uint32 SDLTest_AssertsPassed = 0;

/**
* \brief Assert that logs and break execution flow on failures.
*
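For orientation, here is a minimal sketch of how a test case might drive the assert tracking introduced above. It assumes only what this commit shows: SDLTest_AssertCheck(condition, description) bumps SDLTest_AssertsPassed or SDLTest_AssertsFailed, which the harness later inspects. The test function, buffer logic, and names are hypothetical.

#include "SDL.h"
#include "SDL_test.h"

/* Hypothetical test case: every SDLTest_AssertCheck call increments one of
   the counters declared above, and the harness uses those counts to decide
   whether the case passed, failed, or made no asserts at all. */
void MySuite_TestBufferInit(void *arg)
{
    char *buffer = (char *)SDL_malloc(16);
    SDLTest_AssertCheck(buffer != NULL, "Allocate 16 byte buffer");
    if (buffer != NULL) {
        SDL_memset(buffer, 0, 16);
        SDLTest_AssertCheck(buffer[0] == 0, "Buffer starts zeroed");
        SDL_free(buffer);
    }
}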
17 changes: 6 additions & 11 deletions include/SDL_test_harness.h
@@ -49,22 +49,17 @@ extern "C" {
#define TEST_ENABLED 1
#define TEST_DISABLED 0


-//! Definitions of assert results
-#define ASSERT_PASS 1
-#define ASSERT_FAIL 0

//! Definition of all the possible test return values of the test case method
#define TEST_ABORTED -1
#define TEST_COMPLETED 0
#define TEST_SKIPPED 1


//! Definition of all the possible test results for the harness
#define TEST_RESULT_PASSED 0
#define TEST_RESULT_FAILED 1
#define TEST_RESULT_NO_ASSERT 2
#define TEST_RESULT_SKIPPED 3
-#define TEST_RESULT_KILLED 4
-#define TEST_RESULT_SETUP_FAILURE 5
+#define TEST_RESULT_SETUP_FAILURE 4


//!< Function pointer to a test case setup function (run before every test)
typedef void (*SDLTest_TestCaseSetUpFp)(void *arg);
@@ -92,7 +87,7 @@ typedef struct SDLTest_TestCaseReference {
/**
* Holds information about a test suite (multiple test cases).
*/
-typedef struct TestSuiteReference {
+typedef struct SDLTest_TestSuiteReference {
/*!< "PlatformSuite" */
char *name;
/*!< The function that is run before each test. NULL skips. */
@@ -101,7 +96,7 @@ typedef struct TestSuiteReference {
const SDLTest_TestCaseReference **testCases;
/*!< The function that is run after each test. NULL skips. */
SDLTest_TestCaseTearDownFp testTearDown;
-} TestSuiteReference;
+} SDLTest_TestSuiteReference;


/* Ends C function definitions when using C++ */
#ifdef __cplusplus
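Continuing the hypothetical test case from the sketch above, here is one way a suite could be wired against these structures. The field names follow what the harness code further down dereferences (testCase, name, description, enabled on the case; name, testSetUp, testCases, testTearDown on the suite); C99 designated initializers are used so the sketch does not depend on field order, and everything outside the SDL_test types is illustrative.

/* Hypothetical suite table for the renamed SDLTest_TestSuiteReference type. */
static const SDLTest_TestCaseReference bufferInitTest = {
    .testCase = MySuite_TestBufferInit,
    .name = "buffer_init",
    .description = "Allocates a small buffer and checks it is zeroed",
    .enabled = TEST_ENABLED
};

static const SDLTest_TestCaseReference *mySuiteTests[] = {
    &bufferInitTest,
    NULL                                /* the harness stops at the NULL entry */
};

static SDLTest_TestSuiteReference mySuite = {
    .name = "MySuite",
    .testSetUp = NULL,                  /* NULL skips per-test setup */
    .testCases = mySuiteTests,
    .testTearDown = NULL                /* NULL skips per-test teardown */
};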
12 changes: 3 additions & 9 deletions src/test/SDL_test_assert.c
@@ -29,14 +29,8 @@


#include "SDL_test.h" #include "SDL_test.h"


-/*! \brief counts the failed asserts */
-static Uint32 SDLTest_AssertsFailed = 0;
-
-/*! \brief counts the passed asserts */
-static Uint32 SDLTest_AssertsPassed = 0;

/* Assert check message format */
-const char *SDLTest_AssertCheckFmt = "Assert %s: %s";
+const char *SDLTest_AssertCheckFmt = "Assert '%s': %s";


/* Assert summary message format */
const char *SDLTest_AssertSummaryFmt = "Assert Summary: Total=%d Passed=%d Failed=%d";
@@ -58,12 +52,12 @@ int SDLTest_AssertCheck(int assertCondition, char *assertDescription)
if (assertCondition == ASSERT_FAIL)
{
SDLTest_AssertsFailed++;
-SDLTest_LogError(fmt, "Failed", assertDescription);
+SDLTest_LogError(fmt, assertDescription, "Failed");
}
else
{
SDLTest_AssertsPassed++;
-SDLTest_Log(fmt, "Passed", assertDescription);
+SDLTest_Log(fmt, assertDescription, "Passed");
}


return assertCondition;
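A brief hedged illustration of the argument-order change above: the description now comes first and the pass/fail word second, matching the quoted '%s' in the new format string. The exact prefix that SDLTest_Log/SDLTest_LogError add to each line is not visible in this diff, and the helper function below is purely illustrative.

#include "SDL_test.h"

/* With SDLTest_AssertCheckFmt now "Assert '%s': %s" and the swapped
   argument order, the two calls below log roughly as shown. */
static void AssertFormatExample(void)
{
    /* logs roughly:  Assert 'Check basic arithmetic': Passed */
    SDLTest_AssertCheck(2 + 2 == 4, "Check basic arithmetic");

    /* logs roughly:  Assert 'Check broken arithmetic': Failed */
    SDLTest_AssertCheck(2 + 2 == 5, "Check broken arithmetic");
}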
270 changes: 267 additions & 3 deletions src/test/SDL_test_harness.c
@@ -23,10 +23,19 @@


#include "SDL_test.h" #include "SDL_test.h"


#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>


-// TODO: port over remaining harness
+/* Assert check message format */
const char *SDLTest_TestCheckFmt = "Test '%s': %s";

/* Invalid test name/description message format */
const char *SDLTest_InvalidNameFmt = "(Invalid)";

/*! \brief Timeout for single test case execution */
static Uint32 SDLTest_TestCaseTimeout = 3600;


/**
* Generates a random run seed string for the harness. The generated seed
@@ -155,7 +164,7 @@ SDLTest_GenerateExecKey(char *runSeed, char *suiteName, char *testName, int iter
* \return Timer id or -1 on failure.
*/
SDL_TimerID
-SetTestTimeout(int timeout, void (*callback)())
+SDLTest_SetTestTimeout(int timeout, void (*callback)())
{
Uint32 timeoutInMilliseconds;
SDL_TimerID timerID;
@@ -188,3 +197,258 @@ SetTestTimeout(int timeout, void (*callback)())


return timerID;
}

void
SDLTest_BailOut()
{
SDLTest_LogError("TestCaseTimeout timer expired. Aborting test run.");
exit(TEST_ABORTED); // bail out from the test
}

/**
* \brief Execute a test using the given execution key.
*
* \param testSuite Suite containing the test case.
* \param testCase Case to execute.
* \param execKey Execution key for the fuzzer.
*
* \returns Test case result.
*/
int
SDLTest_RunTest(SDLTest_TestSuiteReference *testSuite, SDLTest_TestCaseReference *testCase, Uint64 execKey)
{
SDL_TimerID timer = 0;

if (testSuite==NULL || testCase==NULL || testSuite->name==NULL || testCase->name==NULL)
{
SDLTest_LogError("Setup failure: testSuite or testCase references NULL");
return TEST_RESULT_SETUP_FAILURE;
}

if (!testCase->enabled)
{
SDLTest_Log((char *)SDLTest_TestCheckFmt, testCase->name, "Skipped");
return TEST_RESULT_SKIPPED;
}

// Initialize fuzzer
SDLTest_FuzzerInit(execKey);

// Reset assert tracker
SDLTest_ResetAssertSummary();

// Set timeout timer
timer = SDLTest_SetTestTimeout(SDLTest_TestCaseTimeout, SDLTest_BailOut);

// Maybe run suite initializer function
if (testSuite->testSetUp) {
testSuite->testSetUp(0x0);
if (SDLTest_AssertsFailed > 0) {
SDLTest_LogError((char *)SDLTest_TestCheckFmt, testSuite->name, "Failed");
return TEST_RESULT_SETUP_FAILURE;
}
}

// Run test case function
testCase->testCase(0x0);

// Maybe run suite cleanup function
if (testSuite->testTearDown) {
testSuite->testTearDown(0x0);
}

// Cancel timeout timer
if (timer) {
SDL_RemoveTimer(timer);
}

// Report on asserts and fuzzer usage
SDLTest_Log("Fuzzer invocations: %d", SDLTest_GetFuzzerInvocationCount());
SDLTest_LogAssertSummary();

// Analyze assert count to determine test case result
if (SDLTest_AssertsFailed > 0) {
SDLTest_LogError((char *)SDLTest_TestCheckFmt, testCase->name, "Failed");
return TEST_RESULT_FAILED;
} else {
if (SDLTest_AssertsPassed > 0) {
SDLTest_Log((char *)SDLTest_TestCheckFmt, testCase->name, "Passed");
return TEST_RESULT_PASSED;
} else {
SDLTest_LogError((char *)SDLTest_TestCheckFmt, testCase->name, "No Asserts");
return TEST_RESULT_NO_ASSERT;
}
}
}

/* Prints summary of all suites/tests contained in the given reference */
void SDLTest_LogTestSuiteSummary(SDLTest_TestSuiteReference *testSuites)
{
int suiteCounter;
int testCounter;
SDLTest_TestSuiteReference *testSuite;
SDLTest_TestCaseReference *testCase;

// Loop over all suites
suiteCounter = 0;
while(&testSuites[suiteCounter]) {
testSuite=&testSuites[suiteCounter];
suiteCounter++;
SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
(testSuite->name) ? testSuite->name : SDLTest_InvalidNameFmt);

// Loop over all test cases
testCounter = 0;
while(testSuite->testCases[testCounter])
{
testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
testCounter++;
SDLTest_Log(" Test Case %i - %s: %s", testCounter,
(testCase->name) ? testCase->name : SDLTest_InvalidNameFmt,
(testCase->description) ? testCase->description : SDLTest_InvalidNameFmt);
}
}
}


/**
* \brief Execute all test suites and the test cases they contain.
*
* \param testSuites Suites containing the test case.
* \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
* \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
* \param testIterations Number of iterations to run each test case.
*
* \returns Test run result; 0 when all tests passed, 1 if any tests failed.
*/
int
SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites, char *userRunSeed, Uint64 userExecKey, int testIterations)
{
int suiteCounter;
int testCounter;
int iterationCounter;
SDLTest_TestSuiteReference *testSuite;
SDLTest_TestCaseReference *testCase;
char *runSeed;
Uint64 execKey;
Uint32 runStartTicks;
time_t runStartTimestamp;
Uint32 suiteStartTicks;
time_t suiteStartTimestamp;
Uint32 testStartTicks;
time_t testStartTimestamp;
Uint32 runEndTicks;
time_t runEndTimestamp;
Uint32 suiteEndTicks;
time_t suiteEndTimestamp;
Uint32 testEndTicks;
time_t testEndTimestamp;
int testResult;
int totalTestFailedCount, totalTestPassedCount, totalTestSkippedCount;
int testFailedCount, testPassedCount, testSkippedCount;

// Sanitize test iterations
if (testIterations < 1) {
testIterations = 1;
}

// Generate run seed if we don't have one already
if (userRunSeed == NULL || strlen(userRunSeed) == 0) {
runSeed = SDLTest_GenerateRunSeed(16);
if (runSeed == NULL) {
SDLTest_LogError("Generating a random run seed failed");
return 2;
}
}

// Reset per-run counters
totalTestFailedCount = totalTestPassedCount = totalTestSkippedCount = 0;

// Take time - run start
runStartTicks = SDL_GetTicks();
runStartTimestamp = time(0);

// TODO log run started

// Loop over all suites
suiteCounter = 0;
while(&testSuites[suiteCounter]) {
testSuite=&testSuites[suiteCounter];
suiteCounter++;

// Reset per-suite counters
testFailedCount = testPassedCount = testSkippedCount = 0;

// Take time - suite start
suiteStartTicks = SDL_GetTicks();
suiteStartTimestamp = time(0);

// TODO log suite started
SDLTest_Log("Test Suite %i - %s\n", suiteCounter,
(testSuite->name) ? testSuite->name : SDLTest_InvalidNameFmt);

// Loop over all test cases
testCounter = 0;
while(testSuite->testCases[testCounter])
{
testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
testCounter++;

// Take time - test start
testStartTicks = SDL_GetTicks();
testStartTimestamp = time(0);

// TODO log test started
SDLTest_Log("Test Case %i - %s: %s", testCounter,
(testCase->name) ? testCase->name : SDLTest_InvalidNameFmt,
(testCase->description) ? testCase->description : SDLTest_InvalidNameFmt);

// Loop over all iterations
iterationCounter = 0;
while(iterationCounter < testIterations)
{
iterationCounter++;

if(userExecKey != 0) {
execKey = userExecKey;
} else {
execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
}

SDLTest_Log("Test Iteration %i: execKey %d", iterationCounter, execKey);
testResult = SDLTest_RunTest(testSuite, testCase, execKey);

if (testResult == TEST_RESULT_PASSED) {
testPassedCount++;
totalTestPassedCount++;
} else if (testResult == TEST_RESULT_SKIPPED) {
testSkippedCount++;
totalTestSkippedCount++;
} else {
testFailedCount++;
totalTestFailedCount++;
}
}

// Take time - test end
testEndTicks = SDL_GetTicks();
testEndTimestamp = time(0);

// TODO log test ended
}

// Take time - suite end
suiteEndTicks = SDL_GetTicks();
suiteEndTimestamp = time(0);

// TODO log suite ended
}

// Take time - run end
runEndTicks = SDL_GetTicks();
runEndTimestamp = time(0);

// TODO log run ended

return (totalTestFailedCount ? 1 : 0);
}
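Finally, a hedged sketch of how SDLTest_RunSuites might be driven once suites exist, using only the signature and return values introduced above; the suite table, test case, and names below are hypothetical. Note that this work-in-progress revision still iterates suites with while(&testSuites[suiteCounter]), so how the suite array is meant to be terminated is not yet settled; the sketch simply passes a one-element array.

#include "SDL.h"
#include "SDL_test.h"

/* Hypothetical smoke test: one assert, so the harness does not classify
   the case as TEST_RESULT_NO_ASSERT. */
static void SmokeTest(void *arg)
{
    SDLTest_AssertCheck(SDL_WasInit(SDL_INIT_TIMER) != 0, "Timer subsystem is up");
}

static const SDLTest_TestCaseReference smokeTestRef = {
    .testCase = SmokeTest, .name = "smoke",
    .description = "Harness smoke test", .enabled = TEST_ENABLED
};
static const SDLTest_TestCaseReference *smokeTests[] = { &smokeTestRef, NULL };
static SDLTest_TestSuiteReference smokeSuites[] = {
    { .name = "SmokeSuite", .testCases = smokeTests }
};

int main(int argc, char *argv[])
{
    int result;

    SDL_Init(SDL_INIT_TIMER);    /* the harness arms an SDL timer per test case */

    /* NULL seed and 0 exec key let the harness generate its own; one iteration. */
    result = SDLTest_RunSuites(smokeSuites, NULL, 0, 1);

    SDL_Quit();
    return result;               /* 0 = all tests passed, 1 = at least one failure */
}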
