diff --git a/test/test64bit.cpp b/test/test64bit.cpp index 1f8acd0eb46..4e3ff768a34 100644 --- a/test/test64bit.cpp +++ b/test/test64bit.cpp @@ -42,7 +42,8 @@ class Test64BitPortability : public TestFixture { TEST_CASE(assignment); } - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); @@ -50,7 +51,7 @@ class Test64BitPortability : public TestFixture { Tokenizer tokenizer(&settings, this); LOAD_LIB_2(settings.library, "std.cfg"); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check char variable usage.. Check64BitPortability check64BitPortability(&tokenizer, &settings, this); diff --git a/test/testassert.cpp b/test/testassert.cpp index 759f8985875..537ebfdf196 100644 --- a/test/testassert.cpp +++ b/test/testassert.cpp @@ -30,14 +30,15 @@ class TestAssert : public TestFixture { private: Settings settings; - void check(const char code[], const char *filename = "test.cpp") { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], const char *filename = "test.cpp") { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check.. 
CheckAssert checkAssert; diff --git a/test/testastutils.cpp b/test/testastutils.cpp index 27cba353c76..d3cf86eb2c8 100644 --- a/test/testastutils.cpp +++ b/test/testastutils.cpp @@ -33,29 +33,30 @@ class TestAstUtils : public TestFixture { private: void run() OVERRIDE { - TEST_CASE(findLambdaEndToken); - TEST_CASE(findLambdaStartToken); - TEST_CASE(isNullOperand); - TEST_CASE(isReturnScope); - TEST_CASE(isSameExpression); - TEST_CASE(isVariableChanged); - TEST_CASE(isVariableChangedByFunctionCall); - TEST_CASE(nextAfterAstRightmostLeaf); + TEST_CASE(findLambdaEndTokenTest); + TEST_CASE(findLambdaStartTokenTest); + TEST_CASE(isNullOperandTest); + TEST_CASE(isReturnScopeTest); + TEST_CASE(isSameExpressionTest); + TEST_CASE(isVariableChangedTest); + TEST_CASE(isVariableChangedByFunctionCallTest); + TEST_CASE(nextAfterAstRightmostLeafTest); TEST_CASE(isUsedAsBool); } - bool findLambdaEndToken(const char code[]) { +#define findLambdaEndToken(code) findLambdaEndToken_(code, __FILE__, __LINE__) + bool findLambdaEndToken_(const char code[], const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); - const Token * const tokEnd = ::findLambdaEndToken(tokenizer.tokens()); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); + const Token * const tokEnd = (::findLambdaEndToken)(tokenizer.tokens()); return tokEnd && tokEnd->next() == nullptr; } - void findLambdaEndToken() { + void findLambdaEndTokenTest() { const Token* nullTok = nullptr; - ASSERT(nullptr == ::findLambdaEndToken(nullTok)); + ASSERT(nullptr == (::findLambdaEndToken)(nullTok)); ASSERT_EQUALS(false, findLambdaEndToken("void f() { }")); ASSERT_EQUALS(true, findLambdaEndToken("[]{ }")); ASSERT_EQUALS(true, findLambdaEndToken("[]{ return 0; }")); @@ -77,17 +78,18 @@ class TestAstUtils : public TestFixture { ASSERT_EQUALS(true, findLambdaEndToken("[](void) constexpr -> const * const* int { return x; }")); } - 
bool findLambdaStartToken(const char code[]) { +#define findLambdaStartToken(code) findLambdaStartToken_(code, __FILE__, __LINE__) + bool findLambdaStartToken_(const char code[], const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); - const Token * const tokStart = ::findLambdaStartToken(tokenizer.list.back()); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); + const Token * const tokStart = (::findLambdaStartToken)(tokenizer.list.back()); return tokStart && tokStart == tokenizer.list.front(); } - void findLambdaStartToken() { - ASSERT(nullptr == ::findLambdaStartToken(nullptr)); + void findLambdaStartTokenTest() { + ASSERT(nullptr == (::findLambdaStartToken)(nullptr)); ASSERT_EQUALS(false, findLambdaStartToken("void f() { }")); ASSERT_EQUALS(true, findLambdaStartToken("[]{ }")); ASSERT_EQUALS(true, findLambdaStartToken("[]{ return 0; }")); @@ -109,15 +111,16 @@ class TestAstUtils : public TestFixture { ASSERT_EQUALS(true, findLambdaStartToken("[](void) constexpr -> const * const* int { return x; }")); } - bool isNullOperand(const char code[]) { +#define isNullOperand(code) isNullOperand_(code, __FILE__, __LINE__) + bool isNullOperand_(const char code[], const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); - return ::isNullOperand(tokenizer.tokens()); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); + return (::isNullOperand)(tokenizer.tokens()); } - void isNullOperand() { + void isNullOperandTest() { ASSERT_EQUALS(true, isNullOperand("(void*)0;")); ASSERT_EQUALS(true, isNullOperand("(void*)0U;")); ASSERT_EQUALS(true, isNullOperand("(void*)0x0LL;")); @@ -130,18 +133,19 @@ class TestAstUtils : public TestFixture { ASSERT_EQUALS(false, isNullOperand("(void*)1;")); } - bool isReturnScope(const char code[], int offset) { +#define 
isReturnScope(code, offset) isReturnScope_(code, offset, __FILE__, __LINE__) + bool isReturnScope_(const char code[], int offset, const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const tok = (offset < 0) ? tokenizer.list.back()->tokAt(1+offset) : tokenizer.tokens()->tokAt(offset); - return ::isReturnScope(tok); + return (::isReturnScope)(tok); } - void isReturnScope() { + void isReturnScopeTest() { ASSERT_EQUALS(true, isReturnScope("void f() { if (a) { return; } }", -2)); ASSERT_EQUALS(true, isReturnScope("int f() { if (a) { return {}; } }", -2)); // #8891 ASSERT_EQUALS(true, isReturnScope("std::string f() { if (a) { return std::string{}; } }", -2)); // #8891 @@ -160,19 +164,20 @@ class TestAstUtils : public TestFixture { ASSERT_EQUALS(true, isReturnScope("void positiveTokenOffset() { return; }", 7)); } - bool isSameExpression(const char code[], const char tokStr1[], const char tokStr2[]) { +#define isSameExpression(code, tokStr1, tokStr2) isSameExpression_(code, tokStr1, tokStr2, __FILE__, __LINE__) + bool isSameExpression_(const char code[], const char tokStr1[], const char tokStr2[], const char* file, int line) { Settings settings; Library library; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); tokenizer.simplifyTokens1(""); const Token * const tok1 = Token::findsimplematch(tokenizer.tokens(), tokStr1, strlen(tokStr1)); const Token * const tok2 = Token::findsimplematch(tok1->next(), tokStr2, strlen(tokStr2)); - return ::isSameExpression(false, false, tok1, tok2, library, false, true, nullptr); + return (::isSameExpression)(false, false, tok1, tok2, library, false, true, nullptr); } - void isSameExpression() { + void isSameExpressionTest() { 
ASSERT_EQUALS(true, isSameExpression("x = 1 + 1;", "1", "1")); ASSERT_EQUALS(false, isSameExpression("x = 1 + 1u;", "1", "1u")); ASSERT_EQUALS(true, isSameExpression("x = 1.0 + 1.0;", "1.0", "1.0")); @@ -199,17 +204,18 @@ class TestAstUtils : public TestFixture { ASSERT_EQUALS(true, true); } - bool isVariableChanged(const char code[], const char startPattern[], const char endPattern[]) { +#define isVariableChanged(code, startPattern, endPattern) isVariableChanged_(code, startPattern, endPattern, __FILE__, __LINE__) + bool isVariableChanged_(const char code[], const char startPattern[], const char endPattern[], const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * const tok1 = Token::findsimplematch(tokenizer.tokens(), startPattern, strlen(startPattern)); const Token * const tok2 = Token::findsimplematch(tokenizer.tokens(), endPattern, strlen(endPattern)); - return ::isVariableChanged(tok1,tok2,1,false,&settings,true); + return (::isVariableChanged)(tok1, tok2, 1, false, &settings, true); } - void isVariableChanged() { + void isVariableChangedTest() { // #8211 - no lhs for >> , do not crash isVariableChanged("void f() {\n" " int b;\n" @@ -221,16 +227,17 @@ class TestAstUtils : public TestFixture { "}\n", "= a", "}")); } - bool isVariableChangedByFunctionCall(const char code[], const char pattern[], bool *inconclusive) { +#define isVariableChangedByFunctionCall(code, pattern, inconclusive) isVariableChangedByFunctionCall_(code, pattern, inconclusive, __FILE__, __LINE__) + bool isVariableChangedByFunctionCall_(const char code[], const char pattern[], bool *inconclusive, const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); 
const Token * const argtok = Token::findmatch(tokenizer.tokens(), pattern); - return ::isVariableChangedByFunctionCall(argtok, 0, &settings, inconclusive); + return (::isVariableChangedByFunctionCall)(argtok, 0, &settings, inconclusive); } - void isVariableChangedByFunctionCall() { + void isVariableChangedByFunctionCallTest() { const char *code; bool inconclusive; @@ -249,16 +256,17 @@ class TestAstUtils : public TestFixture { TODO_ASSERT_EQUALS(false, true, inconclusive); } - bool nextAfterAstRightmostLeaf(const char code[], const char parentPattern[], const char rightPattern[]) { +#define nextAfterAstRightmostLeaf(code, parentPattern, rightPattern) nextAfterAstRightmostLeaf_(code, parentPattern, rightPattern, __FILE__, __LINE__) + bool nextAfterAstRightmostLeaf_(const char code[], const char parentPattern[], const char rightPattern[], const char* file, int line) { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token * tok = Token::findsimplematch(tokenizer.tokens(), parentPattern, strlen(parentPattern)); - return Token::simpleMatch(::nextAfterAstRightmostLeaf(tok), rightPattern, strlen(rightPattern)); + return Token::simpleMatch((::nextAfterAstRightmostLeaf)(tok), rightPattern, strlen(rightPattern)); } - void nextAfterAstRightmostLeaf() { + void nextAfterAstRightmostLeafTest() { ASSERT_EQUALS(true, nextAfterAstRightmostLeaf("void f(int a, int b) { int x = a + b; }", "=", "; }")); ASSERT_EQUALS(true, nextAfterAstRightmostLeaf("int * g(int); void f(int a, int b) { int x = g(a); }", "=", "; }")); ASSERT_EQUALS(true, nextAfterAstRightmostLeaf("int * g(int); void f(int a, int b) { int x = g(a)[b]; }", "=", "; }")); diff --git a/test/testautovariables.cpp b/test/testautovariables.cpp index fa6d9d7c338..89e30da6108 100644 --- a/test/testautovariables.cpp +++ b/test/testautovariables.cpp @@ -30,7 +30,8 @@ class 
TestAutoVariables : public TestFixture { private: Settings settings; - void check(const char code[], bool inconclusive = false, const char* filename = "test.cpp") { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool inconclusive = false, const char* filename = "test.cpp") { // Clear the error buffer.. errout.str(""); @@ -39,7 +40,7 @@ class TestAutoVariables : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); CheckAutoVariables checkAutoVariables; checkAutoVariables.runChecks(&tokenizer, &settings, this); diff --git a/test/testbool.cpp b/test/testbool.cpp index c4e44c847aa..3d63745ce5f 100644 --- a/test/testbool.cpp +++ b/test/testbool.cpp @@ -75,7 +75,8 @@ class TestBool : public TestFixture { TEST_CASE(returnNonBoolClass); } - void check(const char code[], bool experimental = false, const char filename[] = "test.cpp") { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool experimental = false, const char filename[] = "test.cpp") { // Clear the error buffer.. errout.str(""); @@ -84,7 +85,7 @@ class TestBool : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check... 
CheckBool checkBool(&tokenizer, &settings, this); diff --git a/test/testboost.cpp b/test/testboost.cpp index 27133893579..700aa3859fb 100644 --- a/test/testboost.cpp +++ b/test/testboost.cpp @@ -37,14 +37,15 @@ class TestBoost : public TestFixture { TEST_CASE(BoostForeachContainerModification); } - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckBoost checkBoost; diff --git a/test/testbufferoverrun.cpp b/test/testbufferoverrun.cpp index ea7435d4ce7..987a344ff92 100755 --- a/test/testbufferoverrun.cpp +++ b/test/testbufferoverrun.cpp @@ -38,7 +38,8 @@ class TestBufferOverrun : public TestFixture { private: Settings settings0; - void check(const char code[], const char filename[] = "test.cpp") { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { // Clear the error buffer.. errout.str(""); @@ -47,17 +48,17 @@ class TestBufferOverrun : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check for buffer overruns.. 
CheckBufferOverrun checkBufferOverrun; checkBufferOverrun.runChecks(&tokenizer, &settings0, this); } - void check(const char code[], const Settings &settings, const char filename[] = "test.cpp") { + void check_(const char* file, int line, const char code[], const Settings &settings, const char filename[] = "test.cpp") { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Clear the error buffer.. errout.str(""); @@ -4461,22 +4462,23 @@ class TestBufferOverrun : public TestFixture { ASSERT_EQUALS("[test.cpp:3]: (portability) Undefined behaviour, pointer arithmetic 'arr+20' is out of bounds.\n", errout.str()); } - void ctu(const char code[]) { +#define ctu(code) ctu_(code, __FILE__, __LINE__) + void ctu_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CTU::FileInfo *ctu = CTU::getFileInfo(&tokenizer); // Check code.. 
std::list<Check::FileInfo*> fileInfo; - CheckBufferOverrun check(&tokenizer, &settings0, this); - fileInfo.push_back(check.getFileInfo(&tokenizer, &settings0)); - check.analyseWholeProgram(ctu, fileInfo, settings0, *this); + CheckBufferOverrun checkBO(&tokenizer, &settings0, this); + fileInfo.push_back(checkBO.getFileInfo(&tokenizer, &settings0)); + checkBO.analyseWholeProgram(ctu, fileInfo, settings0, *this); while (!fileInfo.empty()) { delete fileInfo.back(); fileInfo.pop_back(); diff --git a/test/testcharvar.cpp b/test/testcharvar.cpp index 4f4a4fe5c72..344eccc8b0f 100644 --- a/test/testcharvar.cpp +++ b/test/testcharvar.cpp @@ -41,14 +41,15 @@ class TestCharVar : public TestFixture { TEST_CASE(bitop); } - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check char variable usage.. CheckOther checkOther(&tokenizer, &settings, this); diff --git a/test/testclass.cpp b/test/testclass.cpp index 7da29e2f672..c737d052813 100644 --- a/test/testclass.cpp +++ b/test/testclass.cpp @@ -219,16 +219,17 @@ class TestClass : public TestFixture { TEST_CASE(override1); TEST_CASE(overrideCVRefQualifiers); - TEST_CASE(checkThisUseAfterFree); + TEST_CASE(thisUseAfterFree); TEST_CASE(unsafeClassRefMember); TEST_CASE(ctuOneDefinitionRule); - TEST_CASE(getFileInfo); + TEST_CASE(testGetFileInfo); } - void checkCopyCtorAndEqOperator(const char code[]) { +#define checkCopyCtorAndEqOperator(code) checkCopyCtorAndEqOperator_(code, __FILE__, __LINE__) + void checkCopyCtorAndEqOperator_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); Settings settings; @@ -237,11 +238,11 @@ class TestClass : public TestFixture { // Tokenize..
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings, this); - checkClass.checkCopyCtorAndEqOperator(); + (checkClass.checkCopyCtorAndEqOperator)(); } void copyCtorAndEqOperator() { @@ -333,18 +334,19 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkExplicitConstructors(const char code[]) { +#define checkExplicitConstructors(code) checkExplicitConstructors_(code, __FILE__, __LINE__) + void checkExplicitConstructors_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings0, this); - checkClass.checkExplicitConstructors(); + (checkClass.checkExplicitConstructors)(); } void explicitConstructors() { @@ -442,18 +444,19 @@ class TestClass : public TestFixture { ASSERT_EQUALS("[test.cpp:1]: (style) Struct 'A' has a constructor with 1 argument that is not explicit.\n", errout.str()); } - void checkDuplInheritedMembers(const char code[]) { +#define checkDuplInheritedMembers(code) checkDuplInheritedMembers_(code, __FILE__, __LINE__) + void checkDuplInheritedMembers_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. 
CheckClass checkClass(&tokenizer, &settings1, this); - checkClass.checkDuplInheritedMembers(); + (checkClass.checkDuplInheritedMembers)(); } void duplInheritedMembers() { @@ -596,14 +599,15 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkCopyConstructor(const char code[]) { +#define checkCopyConstructor(code) checkCopyConstructor_(code, __FILE__, __LINE__) + void checkCopyConstructor_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings0, this); @@ -1027,14 +1031,15 @@ class TestClass : public TestFixture { } // Check that operator Equal returns reference to this - void checkOpertorEqRetRefThis(const char code[]) { +#define checkOpertorEqRetRefThis(code) checkOpertorEqRetRefThis_(code, __FILE__, __LINE__) + void checkOpertorEqRetRefThis_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings0, this); @@ -1490,14 +1495,15 @@ class TestClass : public TestFixture { } // Check that operator Equal checks for assignment to self - void checkOpertorEqToSelf(const char code[]) { +#define checkOpertorEqToSelf(code) checkOpertorEqToSelf_(code, __FILE__, __LINE__) + void checkOpertorEqToSelf_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. 
CheckClass checkClass(&tokenizer, &settings1, this); @@ -2448,7 +2454,8 @@ class TestClass : public TestFixture { } // Check that base classes have virtual destructors - void checkVirtualDestructor(const char code[], bool inconclusive = false) { +#define checkVirtualDestructor(...) checkVirtualDestructor_(__FILE__, __LINE__, __VA_ARGS__) + void checkVirtualDestructor_(const char* file, int line, const char code[], bool inconclusive = false) { // Clear the error log errout.str(""); @@ -2458,7 +2465,7 @@ class TestClass : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings0, this); @@ -2742,21 +2749,23 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkNoMemset(const char code[]) { + +#define checkNoMemset(...) checkNoMemset_(__FILE__, __LINE__, __VA_ARGS__) + void checkNoMemset_(const char* file, int line, const char code[]) { Settings settings; settings.severity.enable(Severity::warning); settings.severity.enable(Severity::portability); - checkNoMemset(code,settings); + checkNoMemset_(file, line, code, settings); } - void checkNoMemset(const char code[], const Settings &settings) { + void checkNoMemset_(const char* file, int line, const char code[], const Settings &settings) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. 
CheckClass checkClass(&tokenizer, &settings, this); @@ -3338,15 +3347,15 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - - void checkThisSubtraction(const char code[]) { +#define checkThisSubtraction(code) checkThisSubtraction_(code, __FILE__, __LINE__) + void checkThisSubtraction_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings1, this); @@ -3371,7 +3380,8 @@ class TestClass : public TestFixture { "[test.cpp:3]: (warning) Suspicious pointer subtraction. Did you intend to write '->'?\n", errout.str()); } - void checkConst(const char code[], Settings *s = nullptr, bool inconclusive = true) { +#define checkConst(...) checkConst_(__FILE__, __LINE__, __VA_ARGS__) + void checkConst_(const char* file, int line, const char code[], Settings *s = nullptr, bool inconclusive = true) { // Clear the error log errout.str(""); @@ -3383,10 +3393,10 @@ class TestClass : public TestFixture { // Tokenize.. Tokenizer tokenizer(s, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckClass checkClass(&tokenizer, s, this); - checkClass.checkConst(); + (checkClass.checkConst)(); } void const1() { @@ -6559,7 +6569,8 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkInitializerListOrder(const char code[]) { +#define checkInitializerListOrder(code) checkInitializerListOrder_(code, __FILE__, __LINE__) + void checkInitializerListOrder_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); @@ -6569,7 +6580,7 @@ class TestClass : public TestFixture { // Tokenize.. 
Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckClass checkClass(&tokenizer, &settings0, this); checkClass.initializerListOrder(); @@ -6593,7 +6604,8 @@ class TestClass : public TestFixture { "[test.cpp:4] -> [test.cpp:2]: (style, inconclusive) Member variable 'Fred::a' is in the wrong place in the initializer list.\n", errout.str()); } - void checkInitializationListUsage(const char code[]) { +#define checkInitializationListUsage(code) checkInitializationListUsage_(code, __FILE__, __LINE__) + void checkInitializationListUsage_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); @@ -6604,7 +6616,7 @@ class TestClass : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckClass checkClass(&tokenizer, &settings, this); checkClass.initializationListUsage(); @@ -6808,17 +6820,18 @@ class TestClass : public TestFixture { } - void checkSelfInitialization(const char code[]) { +#define checkSelfInitialization(code) checkSelfInitialization_(code, __FILE__, __LINE__) + void checkSelfInitialization_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. 
Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckClass checkClass(&tokenizer, &settings0, this); - checkClass.checkSelfInitialization(); + (checkClass.checkSelfInitialization)(); } void selfInitialization() { @@ -6901,7 +6914,9 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkVirtualFunctionCall(const char code[], Settings *s = nullptr, bool inconclusive = true) { + +#define checkVirtualFunctionCall(...) checkVirtualFunctionCall_(__FILE__, __LINE__, __VA_ARGS__) + void checkVirtualFunctionCall_(const char* file, int line, const char code[], Settings *s = nullptr, bool inconclusive = true) { // Clear the error log errout.str(""); @@ -6916,7 +6931,7 @@ class TestClass : public TestFixture { // Tokenize.. Tokenizer tokenizer(s, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckClass checkClass(&tokenizer, s, this); checkClass.checkVirtualFunctionCallInConstructor(); @@ -7175,7 +7190,9 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkOverride(const char code[]) { + +#define checkOverride(code) checkOverride_(code, __FILE__, __LINE__) + void checkOverride_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); Settings settings; @@ -7184,11 +7201,11 @@ class TestClass : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. 
CheckClass checkClass(&tokenizer, &settings, this); - checkClass.checkOverride(); + (checkClass.checkOverride)(); } void override1() { @@ -7257,7 +7274,9 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void checkUnsafeClassRefMember(const char code[]) { + +#define checkUnsafeClassRefMember(code) checkUnsafeClassRefMember_(code, __FILE__, __LINE__) + void checkUnsafeClassRefMember_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); Settings settings; @@ -7267,11 +7286,11 @@ class TestClass : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings, this); - checkClass.checkUnsafeClassRefMember(); + (checkClass.checkUnsafeClassRefMember)(); } void unsafeClassRefMember() { @@ -7279,21 +7298,23 @@ class TestClass : public TestFixture { ASSERT_EQUALS("[test.cpp:1]: (warning) Unsafe class: The const reference member 'C::s' is initialized by a const reference constructor argument. You need to be careful about lifetime issues.\n", errout.str()); } - void checkThisUseAfterFree(const char code[]) { + +#define checkThisUseAfterFree(code) checkThisUseAfterFree_(code, __FILE__, __LINE__) + void checkThisUseAfterFree_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings1, this); - checkClass.checkThisUseAfterFree(); + (checkClass.checkThisUseAfterFree)(); } - void checkThisUseAfterFree() { + void thisUseAfterFree() { setMultiline(); // Calling method.. 
@@ -7443,7 +7464,7 @@ class TestClass : public TestFixture { for (const std::string& c: code) { Tokenizer tokenizer(&settings, this); std::istringstream istr(c); - tokenizer.tokenize(istr, (std::to_string(fileInfo.size()) + ".cpp").c_str()); + ASSERT(tokenizer.tokenize(istr, (std::to_string(fileInfo.size()) + ".cpp").c_str())); fileInfo.push_back(check.getFileInfo(&tokenizer, &settings)); } @@ -7474,24 +7495,26 @@ class TestClass : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void getFileInfo(const char code[]) { + +#define getFileInfo(code) getFileInfo_(code, __FILE__, __LINE__) + void getFileInfo_(const char code[], const char* file, int line) { // Clear the error log errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckClass checkClass(&tokenizer, &settings1, this); - Check::FileInfo * fileInfo = checkClass.getFileInfo(&tokenizer, &settings1); + Check::FileInfo * fileInfo = (checkClass.getFileInfo)(&tokenizer, &settings1); delete fileInfo; } - void getFileInfo() { + void testGetFileInfo() { getFileInfo("void foo() { union { struct { }; }; }"); // don't crash getFileInfo("struct sometype { sometype(); }; sometype::sometype() = delete;"); // don't crash } diff --git a/test/testcondition.cpp b/test/testcondition.cpp index 0021d4633e3..59a85f08497 100755 --- a/test/testcondition.cpp +++ b/test/testcondition.cpp @@ -493,6 +493,7 @@ class TestCondition : public TestFixture { ASSERT_EQUALS("",errout.str()); //correct for negative 'a' } +#define checkPureFunction(code) checkPureFunction_(code, __FILE__, __LINE__) void multicompare() { check("void foo(int x)\n" "{\n" @@ -532,14 +533,14 @@ class TestCondition : public TestFixture { ASSERT_EQUALS("[test.cpp:3]: (style) Expression is always false because 'else if' condition matches previous condition at line 2.\n", errout.str()); } - void 
checkPureFunction(const char code[]) { + void checkPureFunction_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckCondition checkCondition; checkCondition.runChecks(&tokenizer, &settings1, this); diff --git a/test/testconstructors.cpp b/test/testconstructors.cpp index 7b10120edf5..1c3772dba1b 100644 --- a/test/testconstructors.cpp +++ b/test/testconstructors.cpp @@ -30,7 +30,8 @@ class TestConstructors : public TestFixture { private: Settings settings; - void check(const char code[], bool inconclusive = false) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool inconclusive = false) { // Clear the error buffer.. errout.str(""); @@ -39,21 +40,21 @@ class TestConstructors : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check class constructors.. CheckClass checkClass(&tokenizer, &settings, this); checkClass.constructors(); } - void check(const char code[], const Settings &s) { + void check_(const char* file, int line, const char code[], const Settings &s) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&s, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check class constructors.. 
CheckClass checkClass(&tokenizer, &s, this); diff --git a/test/testexceptionsafety.cpp b/test/testexceptionsafety.cpp index 3a28021ccbc..0c76701584a 100644 --- a/test/testexceptionsafety.cpp +++ b/test/testexceptionsafety.cpp @@ -55,7 +55,8 @@ class TestExceptionSafety : public TestFixture { TEST_CASE(rethrowNoCurrentException3); } - void check(const char code[], bool inconclusive = false) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool inconclusive = false) { // Clear the error buffer.. errout.str(""); @@ -64,7 +65,7 @@ class TestExceptionSafety : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check char variable usage.. CheckExceptionSafety checkExceptionSafety(&tokenizer, &settings, this); diff --git a/test/testexprengine.cpp b/test/testexprengine.cpp index b3cd8e55d7e..c4197c78370 100644 --- a/test/testexprengine.cpp +++ b/test/testexprengine.cpp @@ -175,12 +175,13 @@ class TestExprEngine : public TestFixture { return ret; } - std::string expr(const char code[], const std::string &binop) { +#define expr(code, binop) expr_(code, binop, __FILE__, __LINE__) + std::string expr_(const char code[], const std::string &binop, const char* file, int line) { Settings settings; settings.platform(cppcheck::Platform::Unix64); Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); std::string ret; ExprEngine::Callback f = [&](const Token *tok, const ExprEngine::Value &value, ExprEngine::DataBase *dataBase) { if (tok->str() != binop) @@ -197,7 +198,8 @@ class TestExprEngine : public TestFixture { return ret; } - std::string functionCallContractExpr(const char code[], const Settings &s) { +#define 
functionCallContractExpr(...) functionCallContractExpr_(__VA_ARGS__, __FILE__, __LINE__) + std::string functionCallContractExpr_(const char code[], const Settings &s, const char* file, int line) { Settings settings; settings.bugHunting = true; settings.debugBugHunting = true; @@ -205,7 +207,7 @@ settings.platform(cppcheck::Platform::Unix64); Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); std::vector<ExprEngine::Callback> callbacks; std::ostringstream trace; ExprEngine::executeAllFunctions(this, &tokenizer, &settings, callbacks, trace); @@ -217,20 +219,21 @@ return TestExprEngine::cleanupExpr(ret.substr(pos1, pos2+1-pos1)); } - std::string getRange(const char code[], const std::string &str, int linenr = 0) { +#define getRange(...) getRange_(__FILE__, __LINE__, __VA_ARGS__) + std::string getRange_(const char* file, int line, const char code[], const std::string &str, int linenr = 0) { Settings settings; settings.platform(cppcheck::Platform::Unix64); settings.library.smartPointers["std::shared_ptr"]; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); std::string ret; ExprEngine::Callback f = [&](const Token *tok, const ExprEngine::Value &value, ExprEngine::DataBase *dataBase) { (void)dataBase; if ((linenr == 0 || linenr == tok->linenr()) && tok->expressionString() == str) { if (!ret.empty()) ret += ","; - ret += value.getRange(); + ret += (value.getRange)(); } }; std::vector<ExprEngine::Callback> callbacks; @@ -240,7 +243,8 @@ return ret; } - std::string trackExecution(const char code[], Settings *settings = nullptr) { +#define trackExecution(...)
trackExecution_(__FILE__, __LINE__, __VA_ARGS__) + std::string trackExecution_(const char* file, int line, const char code[], Settings *settings = nullptr) { Settings s; if (!settings) settings = &s; @@ -250,7 +254,7 @@ class TestExprEngine : public TestFixture { settings->library.smartPointers["std::shared_ptr"]; Tokenizer tokenizer(settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); std::vector callbacks; std::ostringstream ret; ExprEngine::executeAllFunctions(this, &tokenizer, settings, callbacks, ret); diff --git a/test/testfunctions.cpp b/test/testfunctions.cpp index 9eca6b4838c..d10c439b8eb 100644 --- a/test/testfunctions.cpp +++ b/test/testfunctions.cpp @@ -95,7 +95,8 @@ class TestFunctions : public TestFixture { TEST_CASE(returnLocalStdMove5); } - void check(const char code[], const char filename[]="test.cpp", const Settings* settings_=nullptr) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], const char filename[] = "test.cpp", const Settings* settings_ = nullptr) { // Clear the error buffer.. errout.str(""); @@ -105,7 +106,7 @@ class TestFunctions : public TestFixture { // Tokenize.. 
Tokenizer tokenizer(settings_, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); CheckFunctions checkFunctions(&tokenizer, settings_, this); checkFunctions.runChecks(&tokenizer, settings_, this); diff --git a/test/testgarbage.cpp b/test/testgarbage.cpp index fd75afa5401..3b563e061d4 100644 --- a/test/testgarbage.cpp +++ b/test/testgarbage.cpp @@ -259,6 +259,7 @@ class TestGarbage : public TestFixture { TEST_CASE(nonGarbageCode1); // #8346 } +#define checkCodeInternal(code, filename) checkCodeInternal_(code, filename, __FILE__, __LINE__) std::string checkCode(const std::string &code, bool cpp = true) { // double the tests - run each example as C as well as C++ const char* const filename = cpp ? "test.cpp" : "test.c"; @@ -272,13 +273,13 @@ class TestGarbage : public TestFixture { return checkCodeInternal(code, filename); } - std::string checkCodeInternal(const std::string &code, const char* filename) { + std::string checkCodeInternal_(const std::string &code, const char* filename, const char* file, int line) { errout.str(""); // tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // call all "runChecks" in all registered Check classes for (std::list::const_iterator it = Check::instances().begin(); it != Check::instances().end(); ++it) { @@ -288,11 +289,12 @@ class TestGarbage : public TestFixture { return tokenizer.tokens()->stringifyList(false, false, false, true, false, nullptr, nullptr); } - std::string getSyntaxError(const char code[]) { +#define getSyntaxError(code) getSyntaxError_(code, __FILE__, __LINE__) + std::string getSyntaxError_(const char code[], const char* file, int line) { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); try { - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); } catch (InternalError& e) { if (e.id != "syntaxError") return ""; @@ -309,7 +311,7 @@ class TestGarbage : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS("", errout.str()); } } @@ -357,7 +359,7 @@ class TestGarbage : public TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); try { - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); assertThrowFail(__FILE__, __LINE__); } catch (InternalError& e) { ASSERT_EQUALS("syntax error", e.errorMessage); @@ -391,14 +393,14 @@ class TestGarbage : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.c"); + ASSERT(tokenizer.tokenize(istr, "test.c")); ASSERT_EQUALS("", errout.str()); } { errout.str(""); Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); 
ASSERT_EQUALS("[test.cpp:1]: (information) The code 'class x y {' is not handled. You can use -I or --include to add handling of this code.\n", errout.str()); } } diff --git a/test/testinternal.cpp b/test/testinternal.cpp index 2d7c184eb5a..f4f26920f56 100644 --- a/test/testinternal.cpp +++ b/test/testinternal.cpp @@ -46,14 +46,15 @@ class TestInternal : public TestFixture { TEST_CASE(checkRedundantTokCheck); } - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckInternal checkInternal; diff --git a/test/testio.cpp b/test/testio.cpp index e13e50d382c..8060b739a7c 100644 --- a/test/testio.cpp +++ b/test/testio.cpp @@ -77,7 +77,8 @@ class TestIO : public TestFixture { TEST_CASE(testStdDistance); // #10304 } - void check(const char* code, bool inconclusive = false, bool portability = false, Settings::PlatformType platform = Settings::Unspecified, bool onlyFormatStr = false) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char* code, bool inconclusive = false, bool portability = false, Settings::PlatformType platform = Settings::Unspecified, bool onlyFormatStr = false) { // Clear the error buffer.. errout.str(""); @@ -92,7 +93,7 @@ class TestIO : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. 
CheckIO checkIO(&tokenizer, &settings, this); diff --git a/test/testleakautovar.cpp b/test/testleakautovar.cpp index 5be1e1f74e3..a92d7afe088 100644 --- a/test/testleakautovar.cpp +++ b/test/testleakautovar.cpp @@ -201,14 +201,15 @@ class TestLeakAutoVar : public TestFixture { TEST_CASE(functionCallCastConfig); // #9652 } - void check(const char code[], bool cpp = false) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool cpp = false) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, cpp?"test.cpp":"test.c"); + ASSERT_LOC(tokenizer.tokenize(istr, cpp ? "test.cpp" : "test.c"), file, line); // Check for leaks.. CheckLeakAutoVar c; @@ -217,14 +218,14 @@ class TestLeakAutoVar : public TestFixture { c.runChecks(&tokenizer, &settings, this); } - void check(const char code[], Settings & settings_) { + void check_(const char* file, int line, const char code[], Settings & settings_) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings_, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for leaks.. CheckLeakAutoVar c; @@ -2307,14 +2308,14 @@ class TestLeakAutoVarStrcpy : public TestFixture { private: Settings settings; - void check(const char code[]) { + void check_(const char* file, int line, const char code[]) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for leaks.. 
CheckLeakAutoVar checkLeak; @@ -2357,14 +2358,14 @@ class TestLeakAutoVarWindows : public TestFixture { private: Settings settings; - void check(const char code[]) { + void check_(const char* file, int line, const char code[]) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.c"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.c"), file, line); // Check for leaks.. CheckLeakAutoVar checkLeak; diff --git a/test/testlibrary.cpp b/test/testlibrary.cpp index add484834c0..da3c44ef12a 100644 --- a/test/testlibrary.cpp +++ b/test/testlibrary.cpp @@ -554,14 +554,14 @@ class TestLibrary : public TestFixture { { Tokenizer tokenizer(&settings, nullptr); std::istringstream istr("CString str; str.Format();"); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(library.isnotnoreturn(Token::findsimplematch(tokenizer.tokens(), "Format"))); } { Tokenizer tokenizer(&settings, nullptr); std::istringstream istr("HardDrive hd; hd.Format();"); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(!library.isnotnoreturn(Token::findsimplematch(tokenizer.tokens(), "Format"))); } } @@ -580,14 +580,14 @@ class TestLibrary : public TestFixture { { Tokenizer tokenizer(&settings, nullptr); std::istringstream istr("struct X : public Base { void dostuff() { f(0); } };"); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(library.isnullargbad(Token::findsimplematch(tokenizer.tokens(), "f"),1)); } { Tokenizer tokenizer(&settings, nullptr); std::istringstream istr("struct X : public Base { void dostuff() { f(1,2); } };"); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(!library.isnullargbad(Token::findsimplematch(tokenizer.tokens(), "f"),1)); } } diff --git a/test/testmemleak.cpp b/test/testmemleak.cpp index 
fabb168c189..d16316392dd 100644 --- a/test/testmemleak.cpp +++ b/test/testmemleak.cpp @@ -39,18 +39,19 @@ class TestMemleak : private TestFixture { TEST_CASE(open); } - CheckMemoryLeak::AllocType functionReturnType(const char code[]) { +#define functionReturnType(code) functionReturnType_(code, __FILE__, __LINE__) + CheckMemoryLeak::AllocType functionReturnType_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const CheckMemoryLeak c(&tokenizer, this, &settings); - return c.functionReturnType(&tokenizer.getSymbolDatabase()->scopeList.front().functionList.front()); + return (c.functionReturnType)(&tokenizer.getSymbolDatabase()->scopeList.front().functionList.front()); } void testFunctionReturnType() { @@ -98,7 +99,7 @@ class TestMemleak : private TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); // there is no allocation const Token *tok = Token::findsimplematch(tokenizer.tokens(), "ret ="); @@ -122,7 +123,8 @@ class TestMemleakInFunction : public TestFixture { Settings settings1; Settings settings2; - void check(const char code[]) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[]) { // Clear the error buffer.. errout.str(""); @@ -131,7 +133,7 @@ class TestMemleakInFunction : public TestFixture { // Tokenize.. Tokenizer tokenizer(settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for memory leaks.. 
CheckMemoryLeakInFunction checkMemoryLeak(&tokenizer, settings, this); @@ -467,18 +469,18 @@ class TestMemleakInClass : public TestFixture { * Tokenize and execute leak check for given code * @param code Source code */ - void check(const char code[]) { + void check_(const char* file, int line, const char code[]) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for memory leaks.. CheckMemoryLeakInClass checkMemoryLeak(&tokenizer, &settings, this); - checkMemoryLeak.check(); + (checkMemoryLeak.check)(); } void run() OVERRIDE { @@ -1639,18 +1641,18 @@ class TestMemleakStructMember : public TestFixture { private: Settings settings; - void check(const char code[], bool isCPP = true) { + void check_(const char* file, int line, const char code[], bool isCPP = true) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, isCPP ? "test.cpp" : "test.c"); + ASSERT_LOC(tokenizer.tokenize(istr, isCPP ? "test.cpp" : "test.c"), file, line); // Check for memory leaks.. CheckMemoryLeakStructMember checkMemoryLeakStructMember(&tokenizer, &settings, this); - checkMemoryLeakStructMember.check(); + (checkMemoryLeakStructMember.check)(); } void run() OVERRIDE { @@ -2120,18 +2122,18 @@ class TestMemleakNoVar : public TestFixture { private: Settings settings; - void check(const char code[]) { + void check_(const char* file, int line, const char code[]) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for memory leaks.. 
CheckMemoryLeakNoVar checkMemoryLeakNoVar(&tokenizer, &settings, this); - checkMemoryLeakNoVar.check(); + (checkMemoryLeakNoVar.check)(); } void run() OVERRIDE { diff --git a/test/testnullpointer.cpp b/test/testnullpointer.cpp index 8716801c660..51d98aba99d 100644 --- a/test/testnullpointer.cpp +++ b/test/testnullpointer.cpp @@ -155,10 +155,11 @@ class TestNullPointer : public TestFixture { TEST_CASE(addNull); TEST_CASE(isPointerDeRefFunctionDecl); - TEST_CASE(ctu); + TEST_CASE(ctuTest); } - void check(const char code[], bool inconclusive = false, const char filename[] = "test.cpp") { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool inconclusive = false, const char filename[] = "test.cpp") { // Clear the error buffer.. errout.str(""); @@ -167,8 +168,7 @@ class TestNullPointer : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - if (!tokenizer.tokenize(istr, filename)) - return; + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check for null pointer dereferences.. CheckNullPointer checkNullPointer; @@ -3666,7 +3666,7 @@ class TestNullPointer : public TestFixture { Settings settings1; Tokenizer tokenizer(&settings1,this); std::istringstream code("void f() { int a,b,c; x(a,b,c); }"); - tokenizer.tokenize(code,"test.c"); + ASSERT_EQUALS(true, tokenizer.tokenize(code, "test.c")); const Token *xtok = Token::findsimplematch(tokenizer.tokens(), "x"); // nothing bad.. @@ -3988,14 +3988,15 @@ class TestNullPointer : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void ctu(const char code[]) { +#define ctu(code) ctu_(code, __FILE__, __LINE__) + void ctu_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CTU::FileInfo *ctu = CTU::getFileInfo(&tokenizer); @@ -4011,7 +4012,7 @@ class TestNullPointer : public TestFixture { delete ctu; } - void ctu() { + void ctuTest() { setMultiline(); ctu("void f(int *fp) {\n" diff --git a/test/testother.cpp b/test/testother.cpp index 1d8e4e1b33e..d55e3558345 100644 --- a/test/testother.cpp +++ b/test/testother.cpp @@ -259,7 +259,8 @@ class TestOther : public TestFixture { TEST_CASE(constVariableArrayMember); // #10371 } - void check(const char code[], const char *filename = nullptr, bool experimental = false, bool inconclusive = true, bool runSimpleChecks=true, bool verbose=false, Settings* settings = nullptr) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], const char *filename = nullptr, bool experimental = false, bool inconclusive = true, bool runSimpleChecks=true, bool verbose=false, Settings* settings = nullptr) { // Clear the error buffer.. errout.str(""); @@ -279,7 +280,7 @@ class TestOther : public TestFixture { // Tokenize.. Tokenizer tokenizer(settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename ? filename : "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, filename ? filename : "test.cpp"), file, line); // Check.. 
CheckOther checkOther(&tokenizer, settings, this); @@ -288,8 +289,8 @@ class TestOther : public TestFixture { (void)runSimpleChecks; // TODO Remove this } - void check(const char code[], Settings *s) { - check(code,"test.cpp",false,true,true,false,s); + void check_(const char* file, int line, const char code[], Settings *s) { + check_(file, line, code, "test.cpp", false, true, true, false, s); } void checkP(const char code[], const char *filename = "test.cpp") { @@ -1299,7 +1300,8 @@ class TestOther : public TestFixture { ASSERT_EQUALS("[test.cpp:4]: (style) The scope of the variable 'x' can be reduced.\n", errout.str()); } - void checkOldStylePointerCast(const char code[]) { +#define checkOldStylePointerCast(code) checkOldStylePointerCast_(code, __FILE__, __LINE__) + void checkOldStylePointerCast_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); @@ -1310,7 +1312,7 @@ class TestOther : public TestFixture { // Tokenize.. Tokenizer tokenizerCpp(&settings, this); std::istringstream istr(code); - tokenizerCpp.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizerCpp.tokenize(istr, "test.cpp"), file, line); CheckOther checkOtherCpp(&tokenizerCpp, &settings, this); checkOtherCpp.warningOldStylePointerCast(); @@ -1440,7 +1442,8 @@ class TestOther : public TestFixture { ASSERT_EQUALS("[test.cpp:5]: (style) C-style pointer casting\n", errout.str()); } - void checkInvalidPointerCast(const char code[], bool portability = true, bool inconclusive = false) { +#define checkInvalidPointerCast(...) checkInvalidPointerCast_(__FILE__, __LINE__, __VA_ARGS__) + void checkInvalidPointerCast_(const char* file, int line, const char code[], bool portability = true, bool inconclusive = false) { // Clear the error buffer.. errout.str(""); @@ -1454,7 +1457,7 @@ class TestOther : public TestFixture { // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CheckOther checkOtherCpp(&tokenizer, &settings, this); checkOtherCpp.invalidPointerCast(); diff --git a/test/testpostfixoperator.cpp b/test/testpostfixoperator.cpp index 5533f4f25a6..e503e76990f 100644 --- a/test/testpostfixoperator.cpp +++ b/test/testpostfixoperator.cpp @@ -30,15 +30,15 @@ class TestPostfixOperator : public TestFixture { private: Settings settings; - - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for postfix operators.. CheckPostfixOperator checkPostfixOperator(&tokenizer, &settings, this); diff --git a/test/testsimplifytemplate.cpp b/test/testsimplifytemplate.cpp index 40e2903f905..f6407ca71f8 100644 --- a/test/testsimplifytemplate.cpp +++ b/test/testsimplifytemplate.cpp @@ -261,7 +261,7 @@ class TestSimplifyTemplate : public TestFixture { TEST_CASE(templateAlias5); // Test TemplateSimplifier::instantiateMatch - TEST_CASE(instantiateMatch); + TEST_CASE(instantiateMatchTest); TEST_CASE(templateParameterWithoutName); // #8602 Template default parameter without name yields syntax error TEST_CASE(templateTypeDeduction1); // #8962 @@ -302,7 +302,8 @@ class TestSimplifyTemplate : public TestFixture { TEST_CASE(explicitBool2); } - std::string tok(const char code[], bool debugwarnings = false, Settings::PlatformType type = Settings::Native) { +#define tok(...) 
tok_(__FILE__, __LINE__, __VA_ARGS__) + std::string tok_(const char* file, int line, const char code[], bool debugwarnings = false, Settings::PlatformType type = Settings::Native) { errout.str(""); settings.debugwarnings = debugwarnings; @@ -310,7 +311,7 @@ class TestSimplifyTemplate : public TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); return tokenizer.tokens()->stringifyList(nullptr, true); } @@ -5614,16 +5615,17 @@ class TestSimplifyTemplate : public TestFixture { ASSERT_EQUALS(expected, tok(code)); } - bool instantiateMatch(const char code[], const std::size_t numberOfArguments, const char patternAfter[]) { +#define instantiateMatch(code, numberOfArguments, patternAfter) instantiateMatch_(code, numberOfArguments, patternAfter, __FILE__, __LINE__) + bool instantiateMatch_(const char code[], const std::size_t numberOfArguments, const char patternAfter[], const char* file, int line) { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp", ""); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp", ""), file, line); - return TemplateSimplifier::instantiateMatch(tokenizer.tokens(), numberOfArguments, false, patternAfter); + return (TemplateSimplifier::instantiateMatch)(tokenizer.tokens(), numberOfArguments, false, patternAfter); } - void instantiateMatch() { + void instantiateMatchTest() { // Ticket #8175 ASSERT_EQUALS(false, instantiateMatch("ConvertHelper < From, To > c ;", diff --git a/test/testsimplifytokens.cpp b/test/testsimplifytokens.cpp index dede4b01f14..2a2d540aef8 100644 --- a/test/testsimplifytokens.cpp +++ b/test/testsimplifytokens.cpp @@ -370,14 +370,15 @@ class TestSimplifyTokens : public TestFixture { TEST_CASE(simplifyVarDeclInitLists); } - std::string tok(const char code[], bool simplify = true, Settings::PlatformType type = Settings::Native) { +#define 
tok(...) tok_(__FILE__, __LINE__, __VA_ARGS__) + std::string tok_(const char* file, int line, const char code[], bool simplify = true, Settings::PlatformType type = Settings::Native) { errout.str(""); settings0.platform(type); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); if (simplify) tokenizer.simplifyTokenList2(); @@ -385,14 +386,15 @@ class TestSimplifyTokens : public TestFixture { return tokenizer.tokens()->stringifyList(nullptr, !simplify); } - std::string tokWithWindows(const char code[], bool simplify = true, Settings::PlatformType type = Settings::Native) { +#define tokWithWindows(...) tokWithWindows_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokWithWindows_(const char* file, int line, const char code[], bool simplify = true, Settings::PlatformType type = Settings::Native) { errout.str(""); settings_windows.platform(type); Tokenizer tokenizer(&settings_windows, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); if (simplify) tokenizer.simplifyTokenList2(); @@ -400,44 +402,47 @@ class TestSimplifyTokens : public TestFixture { return tokenizer.tokens()->stringifyList(nullptr, !simplify); } - std::string tok(const char code[], const char filename[], bool simplify = true) { + std::string tok_(const char* file, int line, const char code[], const char filename[], bool simplify = true) { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); if (simplify) tokenizer.simplifyTokenList2(); return tokenizer.tokens()->stringifyList(nullptr, false); } - std::string tokWithNewlines(const char code[]) { +#define tokWithNewlines(code) tokWithNewlines_(code, __FILE__, __LINE__) + std::string tokWithNewlines_(const char 
code[], const char* file, int line) { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); tokenizer.simplifyTokenList2(); return tokenizer.tokens()->stringifyList(false, false, false, true, false); } - std::string tokWithStdLib(const char code[]) { +#define tokWithStdLib(code) tokWithStdLib_(code, __FILE__, __LINE__) + std::string tokWithStdLib_(const char code[], const char* file, int line) { errout.str(""); Tokenizer tokenizer(&settings_std, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); tokenizer.simplifyTokenList2(); return tokenizer.tokens()->stringifyList(nullptr, false); } - std::string tokenizeAndStringify(const char code[], bool simplify = false, bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 = true) { +#define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenizeAndStringify_(const char* file, int linenr, const char code[], bool simplify = false, bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 = true) { errout.str(""); settings1.debugwarnings = true; @@ -447,7 +452,7 @@ class TestSimplifyTokens : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, linenr); if (simplify) tokenizer.simplifyTokenList2(); @@ -467,12 +472,13 @@ class TestSimplifyTokens : public TestFixture { return ""; } - std::string tokenizeDebugListing(const char code[], bool simplify = false, const char filename[] = "test.cpp") { +#define tokenizeDebugListing(...) 
tokenizeDebugListing_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenizeDebugListing_(const char* file, int line, const char code[], bool simplify = false, const char filename[] = "test.cpp") { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); if (simplify) tokenizer.simplifyTokenList2(); @@ -1999,7 +2005,7 @@ class TestSimplifyTokens : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); } @@ -2011,7 +2017,7 @@ class TestSimplifyTokens : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); } @@ -2023,7 +2029,7 @@ class TestSimplifyTokens : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); } @@ -2035,7 +2041,7 @@ class TestSimplifyTokens : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); } @@ -2047,7 +2053,7 @@ class TestSimplifyTokens : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); } @@ -3101,14 +3107,14 @@ class 
TestSimplifyTokens : public TestFixture { } } - - std::string simplifyIfAndWhileAssign(const char code[]) { +#define simplifyIfAndWhileAssign(code) simplifyIfAndWhileAssign_(code, __FILE__, __LINE__) + std::string simplifyIfAndWhileAssign_(const char code[], const char* file, int line) { // tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); - tokenizer.simplifyIfAndWhileAssign(); + (tokenizer.simplifyIfAndWhileAssign)(); return tokenizer.tokens()->stringifyList(nullptr, false); } @@ -3195,7 +3201,7 @@ class TestSimplifyTokens : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr("{ while (!(m = q->push(x))) {} }"); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); tokenizer.simplifyTokenList2(); ASSERT_EQUALS("{ m = q . push < Message > ( x ) ; while ( ! m ) { m = q . push < Message > ( x ) ; } }", tokenizer.tokens()->stringifyList(nullptr, false)); @@ -4445,7 +4451,7 @@ class TestSimplifyTokens : public TestFixture { void duplicateDefinition() { // #3565 - wrongly detects duplicate definition Tokenizer tokenizer(&settings0, this); std::istringstream istr("{ x ; return a not_eq x; }"); - tokenizer.tokenize(istr, "test.c"); + ASSERT(tokenizer.tokenize(istr, "test.c")); Token *x_token = tokenizer.list.front()->tokAt(5); ASSERT_EQUALS(false, tokenizer.duplicateDefinition(&x_token)); } @@ -5234,14 +5240,15 @@ class TestSimplifyTokens : public TestFixture { "}")); } - std::string simplifyKnownVariables(const char code[]) { +#define simplifyKnownVariables(code) simplifyKnownVariables_(code, __FILE__, __LINE__) + std::string simplifyKnownVariables_(const char code[], const char* file, int line) { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), 
file, line); - tokenizer.simplifyKnownVariables(); + (tokenizer.simplifyKnownVariables)(); return tokenizer.tokens()->stringifyList(nullptr, false); } diff --git a/test/testsimplifytypedef.cpp b/test/testsimplifytypedef.cpp index 08007ceef7c..404aa123f56 100644 --- a/test/testsimplifytypedef.cpp +++ b/test/testsimplifytypedef.cpp @@ -199,7 +199,8 @@ class TestSimplifyTypedef : public TestFixture { TEST_CASE(simplifyTypedefMacro); } - std::string tok(const char code[], bool simplify = true, Settings::PlatformType type = Settings::Native, bool debugwarnings = true) { +#define tok(...) tok_(__FILE__, __LINE__, __VA_ARGS__) + std::string tok_(const char* file, int line, const char code[], bool simplify = true, Settings::PlatformType type = Settings::Native, bool debugwarnings = true) { errout.str(""); settings0.certainty.enable(Certainty::inconclusive); @@ -208,7 +209,7 @@ class TestSimplifyTypedef : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); return tokenizer.tokens()->stringifyList(nullptr, !simplify); } @@ -250,15 +251,15 @@ class TestSimplifyTypedef : public TestFixture { return tokenizer.tokens()->stringifyList(nullptr, false); } - - void checkSimplifyTypedef(const char code[]) { +#define checkSimplifyTypedef(code) checkSimplifyTypedef_(code, __FILE__, __LINE__) + void checkSimplifyTypedef_(const char code[], const char* file, int line) { errout.str(""); // Tokenize.. 
settings2.certainty.enable(Certainty::inconclusive); settings2.debugwarnings = true; // show warnings about unhandled typedef Tokenizer tokenizer(&settings2, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); } diff --git a/test/testsimplifyusing.cpp b/test/testsimplifyusing.cpp index 78a78fef756..ba223d3db7b 100644 --- a/test/testsimplifyusing.cpp +++ b/test/testsimplifyusing.cpp @@ -95,7 +95,8 @@ class TestSimplifyUsing : public TestFixture { TEST_CASE(scopeInfo2); } - std::string tok(const char code[], Settings::PlatformType type = Settings::Native, bool debugwarnings = true) { +#define tok(...) tok_(__FILE__, __LINE__, __VA_ARGS__) + std::string tok_(const char* file, int line, const char code[], Settings::PlatformType type = Settings::Native, bool debugwarnings = true) { errout.str(""); settings0.certainty.enable(Certainty::inconclusive); @@ -104,7 +105,7 @@ class TestSimplifyUsing : public TestFixture { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); return tokenizer.tokens()->stringifyList(nullptr); } diff --git a/test/testsizeof.cpp b/test/testsizeof.cpp index 6b4a8d204fc..f7732b00f42 100644 --- a/test/testsizeof.cpp +++ b/test/testsizeof.cpp @@ -50,14 +50,15 @@ class TestSizeof : public TestFixture { TEST_CASE(customStrncat); } - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check... 
CheckSizeof checkSizeof(&tokenizer, &settings, this); diff --git a/test/teststl.cpp b/test/teststl.cpp index 8f03f032838..ed3761ece00 100755 --- a/test/teststl.cpp +++ b/test/teststl.cpp @@ -174,7 +174,8 @@ class TestStl : public TestFixture { TEST_CASE(checkMutexes); } - void check(const char code[], const bool inconclusive=false, const Standards::cppstd_t cppstandard=Standards::CPPLatest) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], const bool inconclusive = false, const Standards::cppstd_t cppstandard = Standards::CPPLatest) { // Clear the error buffer.. errout.str(""); @@ -188,22 +189,23 @@ class TestStl : public TestFixture { CheckStl checkStl(&tokenizer, &settings, this); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); checkStl.runChecks(&tokenizer, &settings, this); } - void check(const std::string &code, const bool inconclusive=false) { - check(code.c_str(), inconclusive); + void check_(const char* file, int line, const std::string& code, const bool inconclusive = false) { + check_(file, line, code.c_str(), inconclusive); } - void checkNormal(const char code[]) { +#define checkNormal(code) checkNormal_(code, __FILE__, __LINE__) + void checkNormal_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckStl checkStl(&tokenizer, &settings, this); diff --git a/test/teststring.cpp b/test/teststring.cpp index 32a8c67f024..fecfe5e774f 100644 --- a/test/teststring.cpp +++ b/test/teststring.cpp @@ -60,14 +60,15 @@ class TestString : public TestFixture { TEST_CASE(deadStrcmp); } - void check(const char code[], const char filename[] = "test.cpp") { +#define check(...) 
check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check char variable usage.. CheckString checkString(&tokenizer, &settings, this); diff --git a/test/testsuite.h b/test/testsuite.h index 4e7e312d850..291aa7abf10 100644 --- a/test/testsuite.h +++ b/test/testsuite.h @@ -121,6 +121,7 @@ extern std::ostringstream output; #define TEST_CASE( NAME ) do { if (prepareTest(#NAME)) { setVerbose(false); NAME(); } } while (false) #define ASSERT( CONDITION ) if (!assert_(__FILE__, __LINE__, (CONDITION))) return +#define ASSERT_LOC( CONDITION, FILE_, LINE_ ) assert_(FILE_, LINE_, (CONDITION)) #define CHECK_EQUALS( EXPECTED, ACTUAL ) assertEquals(__FILE__, __LINE__, (EXPECTED), (ACTUAL)) #define ASSERT_EQUALS( EXPECTED, ACTUAL ) if (!assertEquals(__FILE__, __LINE__, (EXPECTED), (ACTUAL))) return #define ASSERT_EQUALS_WITHOUT_LINENUMBERS( EXPECTED, ACTUAL ) assertEqualsWithoutLineNumbers(__FILE__, __LINE__, EXPECTED, ACTUAL) diff --git a/test/testsummaries.cpp b/test/testsummaries.cpp index f2a9ef2aecb..673a0459994 100644 --- a/test/testsummaries.cpp +++ b/test/testsummaries.cpp @@ -36,7 +36,8 @@ class TestSummaries : public TestFixture { TEST_CASE(createSummariesNoreturn); } - std::string createSummaries(const char code[], const char filename[] = "test.cpp") { +#define createSummaries(...) createSummaries_(__FILE__, __LINE__, __VA_ARGS__) + std::string createSummaries_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { // Clear the error buffer.. 
errout.str(""); @@ -44,7 +45,7 @@ class TestSummaries : public TestFixture { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); return Summaries::create(&tokenizer, ""); } diff --git a/test/testsymboldatabase.cpp b/test/testsymboldatabase.cpp index cf0b4f225c1..8f3852524f7 100644 --- a/test/testsymboldatabase.cpp +++ b/test/testsymboldatabase.cpp @@ -82,8 +82,7 @@ class TestSymbolDatabase : public TestFixture { const static SymbolDatabase* getSymbolDB_inner(Tokenizer& tokenizer, const char* code, const char* filename) { errout.str(""); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); - return tokenizer.getSymbolDatabase(); + return tokenizer.tokenize(istr, filename) ? tokenizer.getSymbolDatabase() : nullptr; } static const Scope *findFunctionScopeByToken(const SymbolDatabase * db, const Token *tok) { @@ -2133,7 +2132,8 @@ class TestSymbolDatabase : public TestFixture { ASSERT_EQUALS("char", arg1->typeEndToken()->str()); } - void check(const char code[], bool debug = true, const char filename[] = "test.cpp") { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], bool debug = true, const char filename[] = "test.cpp") { // Clear the error log errout.str(""); @@ -2143,7 +2143,7 @@ class TestSymbolDatabase : public TestFixture { // Tokenize.. 
Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // force symbol database creation tokenizer.createSymbolDatabase(); @@ -7116,10 +7116,11 @@ class TestSymbolDatabase : public TestFixture { ASSERT(class_scope->functionList.begin()->functionScope == &*scope); } - std::string typeOf(const char code[], const char pattern[], const char filename[] = "test.cpp", const Settings *settings = nullptr) { +#define typeOf(...) typeOf_(__FILE__, __LINE__, __VA_ARGS__) + std::string typeOf_(const char* file, int line, const char code[], const char pattern[], const char filename[] = "test.cpp", const Settings *settings = nullptr) { Tokenizer tokenizer(settings ? settings : &settings2, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); const Token* tok; for (tok = tokenizer.list.back(); tok; tok = tok->previous()) if (Token::simpleMatch(tok, pattern, strlen(pattern))) diff --git a/test/testtoken.cpp b/test/testtoken.cpp index 35a236ddf73..b3c851d3da7 100644 --- a/test/testtoken.cpp +++ b/test/testtoken.cpp @@ -132,12 +132,13 @@ class TestToken : public TestFixture { TokenList::deleteTokens(token); } - bool Match(const std::string &code, const std::string &pattern, unsigned int varid=0) { +#define MatchCheck(...) MatchCheck_(__FILE__, __LINE__, __VA_ARGS__) + bool MatchCheck_(const char* file, int line, const std::string& code, const std::string& pattern, unsigned int varid = 0) { static const Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(";" + code + ";"); try { - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); } catch (...) 
{} return Token::Match(tokenizer.tokens()->next(), pattern.c_str(), varid); } @@ -762,7 +763,7 @@ class TestToken : public TestFixture { std::vector::const_iterator test_op, test_ops_end = test_ops.end(); for (test_op = test_ops.begin(); test_op != test_ops_end; ++test_op) { - ASSERT_EQUALS(true, Match(*test_op, "%op%")); + ASSERT_EQUALS(true, MatchCheck(*test_op, "%op%")); } // Negative test against other operators @@ -771,7 +772,7 @@ class TestToken : public TestFixture { std::vector::const_iterator other_op, other_ops_end = other_ops.end(); for (other_op = other_ops.begin(); other_op != other_ops_end; ++other_op) { - ASSERT_EQUALS_MSG(false, Match(*other_op, "%op%"), "Failing other operator: " + *other_op); + ASSERT_EQUALS_MSG(false, MatchCheck(*other_op, "%op%"), "Failing other operator: " + *other_op); } } @@ -784,7 +785,7 @@ class TestToken : public TestFixture { std::vector::const_iterator test_op, test_ops_end = test_ops.end(); for (test_op = test_ops.begin(); test_op != test_ops_end; ++test_op) { - ASSERT_EQUALS(true, Match(*test_op, "%cop%")); + ASSERT_EQUALS(true, MatchCheck(*test_op, "%cop%")); } // Negative test against other operators @@ -794,7 +795,7 @@ class TestToken : public TestFixture { std::vector::const_iterator other_op, other_ops_end = other_ops.end(); for (other_op = other_ops.begin(); other_op != other_ops_end; ++other_op) { - ASSERT_EQUALS_MSG(false, Match(*other_op, "%cop%"), "Failing other operator: " + *other_op); + ASSERT_EQUALS_MSG(false, MatchCheck(*other_op, "%cop%"), "Failing other operator: " + *other_op); } } diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp index ae501af78ea..71f1588f365 100644 --- a/test/testtokenize.cpp +++ b/test/testtokenize.cpp @@ -443,7 +443,8 @@ class TestTokenizer : public TestFixture { TEST_CASE(simplifyIfSwitchForInit4); } - std::string tokenizeAndStringify(const char code[], bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 
= true) { +#define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenizeAndStringify_(const char* file, int linenr, const char code[], bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 = true) { errout.str(""); settings1.debugwarnings = true; @@ -453,7 +454,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings1, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, linenr); // filter out ValueFlow messages.. const std::string debugwarnings = errout.str(); @@ -471,7 +472,8 @@ class TestTokenizer : public TestFixture { return ""; } - std::string tokenizeAndStringifyWindows(const char code[], bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 = true) { +#define tokenizeAndStringifyWindows(...) tokenizeAndStringifyWindows_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenizeAndStringifyWindows_(const char* file, int linenr, const char code[], bool expand = true, Settings::PlatformType platform = Settings::Native, const char* filename = "test.cpp", bool cpp11 = true) { errout.str(""); settings_windows.debugwarnings = true; @@ -481,7 +483,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings_windows, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, linenr); // filter out ValueFlow messages.. 
const std::string debugwarnings = errout.str(); @@ -499,19 +501,20 @@ class TestTokenizer : public TestFixture { return ""; } - std::string tokenizeAndStringify(const char code[], const Settings &settings, const char filename[] = "test.cpp") { + std::string tokenizeAndStringify_(const char* file, int line, const char code[], const Settings &settings, const char filename[] = "test.cpp") { errout.str(""); // tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); if (!tokenizer.tokens()) return ""; return tokenizer.tokens()->stringifyList(false, true, false, true, false, nullptr, nullptr); } - std::string tokenizeDebugListing(const char code[], const char filename[] = "test.cpp") { +#define tokenizeDebugListing(...) tokenizeDebugListing_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenizeDebugListing_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { errout.str(""); settings2.standards.c = Standards::C89; @@ -519,7 +522,7 @@ class TestTokenizer : public TestFixture { Tokenizer tokenizer(&settings2, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // result.. 
return tokenizer.tokens()->stringifyList(true,true,true,true,false); @@ -2807,7 +2810,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); // A body {} ASSERT_EQUALS(true, tok->linkAt(2) == tok->tokAt(9)); @@ -2832,7 +2835,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); // a[10] ASSERT_EQUALS(true, tok->linkAt(7) == tok->tokAt(9)); @@ -2856,7 +2859,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); // foo( ASSERT_EQUALS(true, tok->linkAt(6) == tok->tokAt(10)); @@ -2876,7 +2879,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); // template< ASSERT_EQUALS(true, tok->tokAt(6) == tok->linkAt(4)); @@ -2904,7 +2907,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); // static_cast< @@ -2921,7 +2924,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = 
tokenizer.tokens(); // nvwa<(x > y)> @@ -2937,7 +2940,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); // B<..> @@ -2953,7 +2956,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->tokAt(1) == tok->linkAt(18)); @@ -2970,7 +2973,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->tokAt(2) == tok->linkAt(4)); @@ -2986,7 +2989,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->tokAt(3) == tok->linkAt(9)); @@ -3001,7 +3004,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(3) == nullptr); @@ -3013,7 +3016,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(4) == nullptr); @@ -3025,7 +3028,7 @@ 
class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(1) == tok->tokAt(5)); @@ -3037,7 +3040,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(3) == nullptr); @@ -3049,7 +3052,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(1) == tok->tokAt(7)); } @@ -3060,7 +3063,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(1) == tok->tokAt(7)); } @@ -3070,7 +3073,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(1) == tok->tokAt(7)); ASSERT_EQUALS(true, tok->tokAt(1) == tok->linkAt(7)); @@ -3081,7 +3084,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, 
tok->linkAt(4) == tok->tokAt(8)); ASSERT_EQUALS(true, tok->tokAt(4) == tok->linkAt(8)); @@ -3092,7 +3095,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(1) == tok->tokAt(4)); ASSERT_EQUALS(true, tok->tokAt(1) == tok->linkAt(4)); @@ -3104,7 +3107,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = tokenizer.tokens(); ASSERT_EQUALS(true, tok->linkAt(1) == tok->tokAt(4)); // @@ -3124,7 +3127,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok = Token::findsimplematch(tokenizer.tokens(), "<"); ASSERT_EQUALS(true, tok->link() == tok->tokAt(4)); ASSERT_EQUALS(true, tok->linkAt(4) == tok); @@ -3138,7 +3141,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "struct")->tokAt(2); const Token *tok2 = Token::findsimplematch(tokenizer.tokens(), "{")->previous(); ASSERT_EQUALS(true, tok1->link() == tok2); @@ -3151,7 +3154,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "< Y"); const Token *tok2 = 
Token::findsimplematch(tok1, "> copy"); ASSERT_EQUALS(true, tok1->link() == tok2); @@ -3164,7 +3167,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = tokenizer.tokens()->next(); const Token *tok2 = tok1->tokAt(2); ASSERT_EQUALS(true, tok1->link() == tok2); @@ -3177,7 +3180,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *tok1 = Token::findsimplematch(tokenizer.tokens(), "<"); const Token *tok2 = tok1->tokAt(2); ASSERT_EQUALS(true, tok1->link() == tok2); @@ -3190,7 +3193,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *A = Token::findsimplematch(tokenizer.tokens(), "A <"); ASSERT_EQUALS(true, A->next()->link() == A->tokAt(3)); } @@ -3201,7 +3204,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(true, Token::simpleMatch(tokenizer.tokens()->next()->link(), "> void")); } @@ -3210,7 +3213,7 @@ class TestTokenizer : public TestFixture { const char code[] = "a = f(x%x<--a==x>x);"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr == Token::findsimplematch(tokenizer.tokens(), "<")->link()); } @@ -3219,7 +3222,7 @@ class TestTokenizer : public TestFixture { const char code[] = "using std::list; list l;"; Tokenizer 
tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "<")->link()); } @@ -3231,7 +3234,7 @@ class TestTokenizer : public TestFixture { "}"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "<")->link()); } @@ -3243,7 +3246,7 @@ class TestTokenizer : public TestFixture { "}\n"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> ::")->link()); } @@ -3255,7 +3258,7 @@ class TestTokenizer : public TestFixture { "};\n"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> [")->link()); } @@ -3268,7 +3271,7 @@ class TestTokenizer : public TestFixture { "template using baz = g;\n"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> ;")->link()); } @@ -3283,7 +3286,7 @@ class TestTokenizer : public TestFixture { "auto f = -e<1> == 0;\n"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> ==")->link()); } @@ -3302,7 +3305,7 @@ class TestTokenizer : public TestFixture { "}\n"; Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + 
ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT(nullptr != Token::findsimplematch(tokenizer.tokens(), "> . f (")->link()); } @@ -3312,7 +3315,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token* tok1 = Token::findsimplematch(tokenizer.tokens(), "< class"); const Token* tok2 = Token::findsimplematch(tok1, "> class"); ASSERT_EQUALS(true, tok1->link() == tok2); @@ -3325,7 +3328,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token* tok1 = Token::findsimplematch(tokenizer.tokens(), "< template"); const Token* tok2 = Token::findsimplematch(tok1, "> struct"); ASSERT_EQUALS(true, tok1->link() == tok2); @@ -3562,7 +3565,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); @@ -3589,7 +3592,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); const Token * VAS_Fail = Token::findsimplematch(tokenizer.tokens(), "VAS_Fail"); @@ -3609,7 +3612,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. 
ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); @@ -3644,7 +3647,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); @@ -3685,7 +3688,7 @@ class TestTokenizer : public TestFixture { // tokenize.. Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); // Expected result.. ASSERT_EQUALS(expected, tokenizer.tokens()->stringifyList(nullptr, false)); @@ -4330,7 +4333,7 @@ class TestTokenizer : public TestFixture { errout.str(""); Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); const Token *x = Token::findsimplematch(tokenizer.tokens(), "x"); ASSERT_EQUALS(1, x->bits()); } @@ -6354,10 +6357,11 @@ class TestTokenizer : public TestFixture { tokenizeAndStringify(code.c_str()); // just survive... } - bool isStartOfExecutableScope(int offset, const char code[]) { +#define isStartOfExecutableScope(offset, code) isStartOfExecutableScope_(offset, code, __FILE__, __LINE__) + bool isStartOfExecutableScope_(int offset, const char code[], const char* file, int line) { Tokenizer tokenizer(&settings0, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); return Tokenizer::startOfExecutableScope(tokenizer.tokens()->tokAt(offset)) != nullptr; } @@ -6894,7 +6898,7 @@ class TestTokenizer : public TestFixture { // tokenize.. 
Tokenizer tokenizer(&s, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); } void checkConfiguration() { @@ -6918,7 +6922,7 @@ class TestTokenizer : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); tokenizer.printUnknownTypes(); @@ -6938,7 +6942,7 @@ class TestTokenizer : public TestFixture { Settings settings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT(tokenizer.tokenize(istr, "test.cpp")); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { ASSERT_EQUALS(tok->str() == "(", tok->isCast()); diff --git a/test/testtokenrange.cpp b/test/testtokenrange.cpp index 9c5f0d5e75e..6286999d7e8 100644 --- a/test/testtokenrange.cpp +++ b/test/testtokenrange.cpp @@ -104,7 +104,7 @@ class TestTokenRange : public TestFixture { Settings settings; Tokenizer tokenizer{ &settings, nullptr }; std::istringstream sample("void a(){} void main(){ if(true){a();} }"); - tokenizer.tokenize(sample, "test.cpp"); + ASSERT(tokenizer.tokenize(sample, "test.cpp")); const SymbolDatabase* sd = tokenizer.getSymbolDatabase(); const Scope& scope = *std::next(sd->scopeList.begin(), 3); //The scope of the if block diff --git a/test/testtype.cpp b/test/testtype.cpp index f80ec8a8a76..e3fa69c7db7 100644 --- a/test/testtype.cpp +++ b/test/testtype.cpp @@ -40,7 +40,8 @@ class TestType : public TestFixture { TEST_CASE(checkFloatToIntegerOverflow); } - void check(const char code[], Settings* settings = nullptr, const char filename[] = "test.cpp", const std::string& standard = "c++11") { +#define check(...) 
check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], Settings* settings = nullptr, const char filename[] = "test.cpp", const std::string& standard = "c++11") { // Clear the error buffer.. errout.str(""); @@ -55,7 +56,7 @@ class TestType : public TestFixture { // Tokenize.. Tokenizer tokenizer(settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check.. CheckType checkType(&tokenizer, settings, this); diff --git a/test/testuninitvar.cpp b/test/testuninitvar.cpp index 58c7fe28a77..34f65445b54 100644 --- a/test/testuninitvar.cpp +++ b/test/testuninitvar.cpp @@ -86,7 +86,7 @@ class TestUninitVar : public TestFixture { TEST_CASE(trac_4871); TEST_CASE(syntax_error); // Ticket #5073 TEST_CASE(trac_5970); - TEST_CASE(valueFlowUninit); + TEST_CASE(valueFlowUninitTest); TEST_CASE(valueFlowUninitBreak); TEST_CASE(valueFlowUninitStructMembers); TEST_CASE(uninitvar_ipa); @@ -96,10 +96,11 @@ class TestUninitVar : public TestFixture { TEST_CASE(isVariableUsageDeref); // *p // whole program analysis - TEST_CASE(ctu); + TEST_CASE(ctuTest); } - void checkUninitVar(const char code[], const char fname[] = "test.cpp", bool debugwarnings = false) { +#define checkUninitVar(...) checkUninitVar_(__FILE__, __LINE__, __VA_ARGS__) + void checkUninitVar_(const char* file, int line, const char code[], const char fname[] = "test.cpp", bool debugwarnings = false) { // Clear the error buffer.. errout.str(""); @@ -107,7 +108,7 @@ class TestUninitVar : public TestFixture { settings.debugwarnings = debugwarnings; Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, fname); + ASSERT_LOC(tokenizer.tokenize(istr, fname), file, line); // Check for redundant code.. 
CheckUninitVar checkuninitvar(&tokenizer, &settings, this); @@ -3397,6 +3398,7 @@ class TestUninitVar : public TestFixture { TODO_ASSERT_EQUALS("error", "", errout.str()); } +#define valueFlowUninit(...) valueFlowUninit_(__FILE__, __LINE__, __VA_ARGS__) void valueFlowUninit2_value() { valueFlowUninit("void f() {\n" @@ -4504,7 +4506,7 @@ class TestUninitVar : public TestFixture { // FP Unknown type ASSERT_EQUALS("", errout.str()); } - void valueFlowUninit(const char code[], const char fname[] = "test.cpp") + void valueFlowUninit_(const char* file, int line, const char code[], const char fname[] = "test.cpp") { // Clear the error buffer.. errout.str(""); @@ -4515,14 +4517,15 @@ class TestUninitVar : public TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, fname); + ASSERT_LOC(tokenizer.tokenize(istr, fname), file, line); // Check for redundant code.. CheckUninitVar checkuninitvar(&tokenizer, &settings, this); - checkuninitvar.valueFlowUninit(); + (checkuninitvar.valueFlowUninit)(); } - void valueFlowUninit() { +#define ctu(code) ctu_(__FILE__, __LINE__, code) + void valueFlowUninitTest() { // #9735 - FN valueFlowUninit("typedef struct\n" "{\n" @@ -5960,14 +5963,14 @@ class TestUninitVar : public TestFixture { ASSERT_EQUALS("", errout.str()); } - void ctu(const char code[]) { + void ctu_(const char* file, int line, const char code[]) { // Clear the error buffer.. errout.str(""); // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); CTU::FileInfo *ctu = CTU::getFileInfo(&tokenizer); @@ -5983,7 +5986,7 @@ class TestUninitVar : public TestFixture { delete ctu; } - void ctu() { + void ctuTest() { ctu("void f(int *p) {\n" " a = *p;\n" "}\n" diff --git a/test/testunusedfunctions.cpp b/test/testunusedfunctions.cpp index 678b720f41f..e7f9de50986 100644 --- a/test/testunusedfunctions.cpp +++ b/test/testunusedfunctions.cpp @@ -68,7 +68,8 @@ class TestUnusedFunctions : public TestFixture { TEST_CASE(operatorOverload); } - void check(const char code[], Settings::PlatformType platform = Settings::Native) { +#define check(...) check_(__FILE__, __LINE__, __VA_ARGS__) + void check_(const char* file, int line, const char code[], Settings::PlatformType platform = Settings::Native) { // Clear the error buffer.. errout.str(""); @@ -77,13 +78,13 @@ class TestUnusedFunctions : public TestFixture { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for unused functions.. CheckUnusedFunctions checkUnusedFunctions(&tokenizer, &settings, this); checkUnusedFunctions.parseTokens(tokenizer, "someFile.c", &settings); // check() returns error if and only if errout is not empty. - if (checkUnusedFunctions.check(this, settings)) { + if ((checkUnusedFunctions.check)(this, settings)) { ASSERT(errout.str() != ""); } else { ASSERT_EQUALS("", errout.str()); @@ -439,13 +440,13 @@ class TestUnusedFunctions : public TestFixture { Tokenizer tokenizer2(&settings, this); std::istringstream istr(code); - tokenizer2.tokenize(istr, fname.str().c_str()); + ASSERT(tokenizer2.tokenize(istr, fname.str().c_str())); c.parseTokens(tokenizer2, "someFile.c", &settings); } // Check for unused functions.. 
- c.check(this, settings); + (c.check)(this, settings); ASSERT_EQUALS("[test1.cpp:1]: (style) The function 'f' is never used.\n", errout.str()); } diff --git a/test/testunusedvar.cpp b/test/testunusedvar.cpp index 51888a3b06f..64642c28498 100644 --- a/test/testunusedvar.cpp +++ b/test/testunusedvar.cpp @@ -226,7 +226,9 @@ class TestUnusedVar : public TestFixture { TEST_CASE(globalData); } - void checkStructMemberUsage(const char code[], const std::list *directives=nullptr) { +#define functionVariableUsage(...) functionVariableUsage_(__FILE__, __LINE__, __VA_ARGS__) +#define checkStructMemberUsage(...) checkStructMemberUsage_(__FILE__, __LINE__, __VA_ARGS__) + void checkStructMemberUsage_(const char* file, int line, const char code[], const std::list* directives = nullptr) { // Clear the error buffer.. errout.str(""); @@ -238,11 +240,11 @@ class TestUnusedVar : public TestFixture { Tokenizer tokenizer(&settings, this); tokenizer.setPreprocessor(&preprocessor); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check for unused variables.. CheckUnusedVar checkUnusedVar(&tokenizer, &settings, this); - checkUnusedVar.checkStructMemberUsage(); + (checkUnusedVar.checkStructMemberUsage)(); } void isRecordTypeWithoutSideEffects() { @@ -1572,15 +1574,14 @@ class TestUnusedVar : public TestFixture { ASSERT_EQUALS("[test.cpp:3]: (style) struct member 'S::E' is never used.\n", errout.str()); } - void functionVariableUsage(const char code[], const char filename[]="test.cpp") { + void functionVariableUsage_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - if (!tokenizer.tokenize(istr, filename)) - return; + ASSERT_LOC(tokenizer.tokenize(istr, filename), file, line); // Check for unused variables.. 
CheckUnusedVar checkUnusedVar(&tokenizer, &settings, this); diff --git a/test/testvaarg.cpp b/test/testvaarg.cpp index 1a720142b3c..46d9a790f30 100644 --- a/test/testvaarg.cpp +++ b/test/testvaarg.cpp @@ -30,14 +30,15 @@ class TestVaarg : public TestFixture { private: Settings settings; - void check(const char code[]) { +#define check(code) check_(code, __FILE__, __LINE__) + void check_(const char code[], const char* file, int line) { // Clear the error buffer.. errout.str(""); // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); // Check.. CheckVaarg checkVaarg; diff --git a/test/testvalueflow.cpp b/test/testvalueflow.cpp index bfec5dfada3..13823a49ab0 100755 --- a/test/testvalueflow.cpp +++ b/test/testvalueflow.cpp @@ -178,11 +178,12 @@ class TestValueFlow : public TestFixture { return !val.isImpossible(); } - bool testValueOfXKnown(const char code[], unsigned int linenr, int value) { +#define testValueOfXKnown(...) testValueOfXKnown_(__FILE__, __LINE__, __VA_ARGS__) + bool testValueOfXKnown_(const char* file, int line, const char code[], unsigned int linenr, int value) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -198,11 +199,11 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfXKnown(const char code[], unsigned int linenr, const std::string& expr, int value) { + bool testValueOfXKnown_(const char* file, int line, const char code[], unsigned int linenr, const std::string& expr, int value) { // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token* tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -218,11 +219,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfXImpossible(const char code[], unsigned int linenr, int value) { +#define testValueOfXImpossible(...) testValueOfXImpossible_(__FILE__, __LINE__, __VA_ARGS__) + bool testValueOfXImpossible_(const char* file, int line, const char code[], unsigned int linenr, int value) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -238,12 +240,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfXImpossible(const char code[], unsigned int linenr, const std::string& expr, int value) + bool testValueOfXImpossible_(const char* file, int line, const char code[], unsigned int linenr, const std::string& expr, int value) { // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token* tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -259,11 +261,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfXInconclusive(const char code[], unsigned int linenr, int value) { +#define testValueOfXInconclusive(code, linenr, value) testValueOfXInconclusive_(code, linenr, value, __FILE__, __LINE__) + bool testValueOfXInconclusive_(const char code[], unsigned int linenr, int value, const char* file, int line) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -279,11 +282,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfX(const char code[], unsigned int linenr, int value) { +#define testValueOfX(...) testValueOfX_(__FILE__, __LINE__, __VA_ARGS__) + bool testValueOfX_(const char* file, int line, const char code[], unsigned int linenr, int value) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -297,12 +301,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfX(const char code[], unsigned int linenr, const std::string& expr, int value) + bool testValueOfX_(const char* file, int line, const char code[], unsigned int linenr, const std::string& expr, int value) { // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token* tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -317,11 +321,11 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfX(const char code[], unsigned int linenr, float value, float diff) { + bool testValueOfX_(const char* file, int line, const char code[], unsigned int linenr, float value, float diff) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -336,11 +340,12 @@ class TestValueFlow : public TestFixture { return false; } - std::string getErrorPathForX(const char code[], unsigned int linenr) { +#define getErrorPathForX(code, linenr) getErrorPathForX_(code, linenr, __FILE__, __LINE__) + std::string getErrorPathForX_(const char code[], unsigned int linenr, const char* file, int line) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() != "x" || tok->linenr() != linenr) @@ -360,11 +365,11 @@ class TestValueFlow : public TestFixture { return ""; } - bool testValueOfX(const char code[], unsigned int linenr, const char value[], ValueFlow::Value::ValueType type) { + bool testValueOfX_(const char* file, int line, const char code[], unsigned int linenr, const char value[], ValueFlow::Value::ValueType type) { // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -378,11 +383,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testLifetimeOfX(const char code[], unsigned int linenr, const char value[], ValueFlow::Value::LifetimeScope lifetimeScope = ValueFlow::Value::LifetimeScope::Local) { +#define testLifetimeOfX(...) testLifetimeOfX_(__FILE__, __LINE__, __VA_ARGS__) + bool testLifetimeOfX_(const char* file, int line, const char code[], unsigned int linenr, const char value[], ValueFlow::Value::LifetimeScope lifetimeScope = ValueFlow::Value::LifetimeScope::Local) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -396,11 +402,11 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfX(const char code[], unsigned int linenr, int value, ValueFlow::Value::ValueType type) { + bool testValueOfX_(const char* file, int line, const char code[], unsigned int linenr, int value, ValueFlow::Value::ValueType type) { // Tokenize.. 
Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -414,11 +420,11 @@ class TestValueFlow : public TestFixture { return false; } - bool testValueOfX(const char code[], unsigned int linenr, ValueFlow::Value::MoveKind moveKind) { + bool testValueOfX_(const char* file, int line, const char code[], unsigned int linenr, ValueFlow::Value::MoveKind moveKind) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -432,11 +438,12 @@ class TestValueFlow : public TestFixture { return false; } - bool testConditionalValueOfX(const char code[], unsigned int linenr, int value) { +#define testConditionalValueOfX(code, linenr, value) testConditionalValueOfX_(code, linenr, value, __FILE__, __LINE__) + bool testConditionalValueOfX_(const char code[], unsigned int linenr, int value, const char* file, int line) { // Tokenize.. Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) { if (tok->str() == "x" && tok->linenr() == linenr) { @@ -470,29 +477,31 @@ class TestValueFlow : public TestFixture { settings.debugwarnings = false; } - std::list tokenValues(const char code[], const char tokstr[], const Settings *s = nullptr) { +#define tokenValues(...) 
tokenValues_(__FILE__, __LINE__, __VA_ARGS__) + std::list tokenValues_(const char* file, int line, const char code[], const char tokstr[], const Settings *s = nullptr) { Tokenizer tokenizer(s ? s : &settings, this); std::istringstream istr(code); errout.str(""); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token *tok = Token::findmatch(tokenizer.tokens(), tokstr); return tok ? tok->values() : std::list(); } - std::list tokenValues(const char code[], const char tokstr[], ValueFlow::Value::ValueType vt, const Settings *s = nullptr) { - std::list values = tokenValues(code, tokstr, s); + std::list tokenValues_(const char* file, int line, const char code[], const char tokstr[], ValueFlow::Value::ValueType vt, const Settings *s = nullptr) { + std::list values = tokenValues_(file, line, code, tokstr, s); values.remove_if([&](const ValueFlow::Value& v) { return v.valueType != vt; }); return values; } - std::vector lifetimeValues(const char code[], const char tokstr[], const Settings *s = nullptr) { +#define lifetimeValues(...) lifetimeValues_(__FILE__, __LINE__, __VA_ARGS__) + std::vector lifetimeValues_(const char* file, int line, const char code[], const char tokstr[], const Settings *s = nullptr) { std::vector result; Tokenizer tokenizer(s ? 
s : &settings, this); std::istringstream istr(code); errout.str(""); - tokenizer.tokenize(istr, "test.cpp"); + ASSERT_LOC(tokenizer.tokenize(istr, "test.cpp"), file, line); const Token *tok = Token::findmatch(tokenizer.tokens(), tokstr); if (!tok) return result; @@ -506,8 +515,9 @@ class TestValueFlow : public TestFixture { return result; } - ValueFlow::Value valueOfTok(const char code[], const char tokstr[]) { - std::list values = tokenValues(code, tokstr); +#define valueOfTok(code, tokstr) valueOfTok_(code, tokstr, __FILE__, __LINE__) + ValueFlow::Value valueOfTok_(const char code[], const char tokstr[], const char* file, int line) { + std::list values = tokenValues_(file, line, code, tokstr); return values.size() == 1U && !values.front().isTokValue() ? values.front() : ValueFlow::Value(); } diff --git a/test/testvarid.cpp b/test/testvarid.cpp index 1b97cabe95d..aae6278df0e 100644 --- a/test/testvarid.cpp +++ b/test/testvarid.cpp @@ -219,7 +219,8 @@ class TestVarID : public TestFixture { TEST_CASE(structuredBindings); } - std::string tokenize(const char code[], const char filename[] = "test.cpp") { +#define tokenize(...) tokenize_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenize_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { errout.str(""); Settings settings; @@ -230,7 +231,7 @@ class TestVarID : public TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC((tokenizer.tokenize)(istr, filename), file, line); // result.. Token::stringifyOptions options = Token::stringifyOptions::forDebugVarId(); @@ -238,7 +239,8 @@ class TestVarID : public TestFixture { return tokenizer.tokens()->stringifyList(options); } - std::string tokenizeExpr(const char code[], const char filename[] = "test.cpp") { +#define tokenizeExpr(...) 
tokenizeExpr_(__FILE__, __LINE__, __VA_ARGS__) + std::string tokenizeExpr_(const char* file, int line, const char code[], const char filename[] = "test.cpp") { errout.str(""); Settings settings; @@ -249,7 +251,7 @@ class TestVarID : public TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC((tokenizer.tokenize)(istr, filename), file, line); // result.. Token::stringifyOptions options = Token::stringifyOptions::forDebugExprId(); @@ -257,7 +259,8 @@ class TestVarID : public TestFixture { return tokenizer.tokens()->stringifyList(options); } - std::string compareVaridsForVariable(const char code[], const char varname[], const char filename[] = "test.cpp") { +#define compareVaridsForVariable(...) compareVaridsForVariable_(__FILE__, __LINE__, __VA_ARGS__) + std::string compareVaridsForVariable_(const char* file, int line, const char code[], const char varname[], const char filename[] = "test.cpp") { errout.str(""); Settings settings; @@ -268,7 +271,7 @@ class TestVarID : public TestFixture { Tokenizer tokenizer(&settings, this); std::istringstream istr(code); - tokenizer.tokenize(istr, filename); + ASSERT_LOC((tokenizer.tokenize)(istr, filename), file, line); unsigned int varid = ~0U; for (const Token *tok = tokenizer.tokens(); tok; tok = tok->next()) {