32 changes: 32 additions & 0 deletions test/cli/other_test.py
@@ -3653,3 +3653,35 @@ def test_debug_syntaxerror_c(tmp_path):
    assert stderr.splitlines() == [
        "{}:2:1: error: Code 'template<...' is invalid C code. [syntaxError]".format(test_file)
    ]


def test_ast_max_depth(tmp_path):
    test_file = tmp_path / 'test.cpp'
    with open(test_file, "w") as f:
        f.write(
"""
#define PTR1 (* (* (* (*
#define PTR2 PTR1 PTR1 PTR1 PTR1
#define PTR3 PTR2 PTR2 PTR2 PTR2
#define PTR4 PTR3 PTR3 PTR3 PTR3

#define RBR1 ) ) ) )
#define RBR2 RBR1 RBR1 RBR1 RBR1
#define RBR3 RBR2 RBR2 RBR2 RBR2
#define RBR4 RBR3 RBR3 RBR3 RBR3

int PTR4 q4_var RBR4 = 0;
""")

    args = [
        '-q',
        '--template=simple',
        str(test_file)
    ]

    exitcode, stdout, stderr = cppcheck(args)
    assert exitcode == 0, stdout
    assert stdout.splitlines() == []
    assert stderr.splitlines() == [
        '{}:12:5: error: maximum AST depth exceeded [internalAstError]'.format(test_file)
    ]
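Note on why this input trips the limit: each macro level repeats the previous one four times, so PTR4 expands to 4**4 = 256 "(*" tokens and RBR4 to the matching 256 closing parentheses, giving a declarator nested 256 levels deep on line 12 of the generated file. A minimal sketch of that arithmetic (illustration only, not part of the diff):

# Each PTRn level is four copies of PTR(n-1), so the nesting
# depth grows as 4**n; PTR4 reaches 256 levels, which is past
# the AST depth limit the test expects cppcheck to report.
depth = 1
for _ in range(4):   # PTR1 -> PTR2 -> PTR3 -> PTR4
    depth *= 4
assert depth == 256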
48 changes: 0 additions & 48 deletions test/testtokenize.cpp
@@ -8662,51 +8662,3 @@ class TestTokenizer : public TestFixture {
};

REGISTER_TEST(TestTokenizer)


class TestTokenizerCompileLimits : public TestFixture
{
public:
    TestTokenizerCompileLimits() : TestFixture("TestTokenizerCompileLimits") {}

private:
    void run() override
    {
        TEST_CASE(test); // #5592 crash: gcc: testsuit: gcc.c-torture/compile/limits-declparen.c
    }

#define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__)
    std::string tokenizeAndStringify_(const char* file, int linenr, const std::string& code) {
        // tokenize..
        SimpleTokenizer tokenizer(settingsDefault, *this);
        ASSERT_LOC(tokenizer.tokenize(code), file, linenr);

        if (tokenizer.tokens())
            return tokenizer.tokens()->stringifyList(false, true, false, true, false, nullptr, nullptr);
        return "";
    }

    void test() {
        const char raw_code[] = "#define PTR1 (* (* (* (*\n"
                                "#define PTR2 PTR1 PTR1 PTR1 PTR1\n"
                                "#define PTR3 PTR2 PTR2 PTR2 PTR2\n"
                                "#define PTR4 PTR3 PTR3 PTR3 PTR3\n"
                                "\n"
                                "#define RBR1 ) ) ) )\n"
                                "#define RBR2 RBR1 RBR1 RBR1 RBR1\n"
                                "#define RBR3 RBR2 RBR2 RBR2 RBR2\n"
                                "#define RBR4 RBR3 RBR3 RBR3 RBR3\n"
                                "\n"
                                "int PTR4 q4_var RBR4 = 0;\n";

        // Preprocess file..
        simplecpp::OutputList outputList;
        std::vector<std::string> files;
        const simplecpp::TokenList tokens1(raw_code, sizeof(raw_code), files, "", &outputList);
        const std::string filedata = tokens1.stringify();
        const std::string code = PreprocessorHelper::getcodeforcfg(settingsDefault, *this, filedata, "", "test.c");

        ASSERT_THROW_INTERNAL_EQUALS(tokenizeAndStringify(code), AST, "maximum AST depth exceeded");
    }
};

REGISTER_TEST(TestTokenizerCompileLimits)