diff --git a/test/cli/other_test.py b/test/cli/other_test.py
index a747493e48d..bd7030d88bd 100644
--- a/test/cli/other_test.py
+++ b/test/cli/other_test.py
@@ -3653,3 +3653,35 @@ def test_debug_syntaxerror_c(tmp_path):
     assert stderr.splitlines() == [
         "{}:2:1: error: Code 'template<...' is invalid C code. [syntaxError]".format(test_file)
     ]
+
+
+def test_ast_max_depth(tmp_path):
+    test_file = tmp_path / 'test.cpp'
+    with open(test_file, "w") as f:
+        f.write(
+"""
+#define PTR1 (* (* (* (*
+#define PTR2 PTR1 PTR1 PTR1 PTR1
+#define PTR3 PTR2 PTR2 PTR2 PTR2
+#define PTR4 PTR3 PTR3 PTR3 PTR3
+
+#define RBR1 ) ) ) )
+#define RBR2 RBR1 RBR1 RBR1 RBR1
+#define RBR3 RBR2 RBR2 RBR2 RBR2
+#define RBR4 RBR3 RBR3 RBR3 RBR3
+
+int PTR4 q4_var RBR4 = 0;
+""")
+
+    args = [
+        '-q',
+        '--template=simple',
+        str(test_file)
+    ]
+
+    exitcode, stdout, stderr = cppcheck(args)
+    assert exitcode == 0, stdout
+    assert stdout.splitlines() == []
+    assert stderr.splitlines() == [
+        '{}:12:5: error: maximum AST depth exceeded [internalAstError]'.format(test_file)
+    ]
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 6e468ad524d..31e01340307 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -8662,51 +8662,3 @@ class TestTokenizer : public TestFixture {
 };
 
 REGISTER_TEST(TestTokenizer)
-
-class TestTokenizerCompileLimits : public TestFixture
-{
-public:
-    TestTokenizerCompileLimits() : TestFixture("TestTokenizerCompileLimits") {}
-
-private:
-    void run() override
-    {
-        TEST_CASE(test); // #5592 crash: gcc: testsuit: gcc.c-torture/compile/limits-declparen.c
-    }
-
-#define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__)
-    std::string tokenizeAndStringify_(const char* file, int linenr, const std::string& code) {
-        // tokenize..
-        SimpleTokenizer tokenizer(settingsDefault, *this);
-        ASSERT_LOC(tokenizer.tokenize(code), file, linenr);
-
-        if (tokenizer.tokens())
-            return tokenizer.tokens()->stringifyList(false, true, false, true, false, nullptr, nullptr);
-        return "";
-    }
-
-    void test() {
-        const char raw_code[] = "#define PTR1 (* (* (* (*\n"
-                                "#define PTR2 PTR1 PTR1 PTR1 PTR1\n"
-                                "#define PTR3 PTR2 PTR2 PTR2 PTR2\n"
-                                "#define PTR4 PTR3 PTR3 PTR3 PTR3\n"
-                                "\n"
-                                "#define RBR1 ) ) ) )\n"
-                                "#define RBR2 RBR1 RBR1 RBR1 RBR1\n"
-                                "#define RBR3 RBR2 RBR2 RBR2 RBR2\n"
-                                "#define RBR4 RBR3 RBR3 RBR3 RBR3\n"
-                                "\n"
-                                "int PTR4 q4_var RBR4 = 0;\n";
-
-        // Preprocess file..
-        simplecpp::OutputList outputList;
-        std::vector<std::string> files;
-        const simplecpp::TokenList tokens1(raw_code, sizeof(raw_code), files, "", &outputList);
-        const std::string filedata = tokens1.stringify();
-        const std::string code = PreprocessorHelper::getcodeforcfg(settingsDefault, *this, filedata, "", "test.c");
-
-        ASSERT_THROW_INTERNAL_EQUALS(tokenizeAndStringify(code), AST, "maximum AST depth exceeded");
-    }
-};
-
-REGISTER_TEST(TestTokenizerCompileLimits)