Merged
lib/tokenize.cpp (7 additions, 0 deletions)
@@ -7631,16 +7631,20 @@ void Tokenizer::simplifyStaticConst()
            }

            // Move the qualifier to the left-most position in the declaration
            const int column = tok->next()->column();
            tok->deleteNext();
            if (!leftTok) {
                list.front()->insertToken(qualifiers[i]);
                list.front()->swapWithNext();
                list.front()->column(column);
                tok = list.front();
            } else if (leftTok->next()) {
                leftTok->next()->insertTokenBefore(qualifiers[i]);
                leftTok->next()->column(column);
                tok = leftTok->next();
            } else {
                leftTok->insertToken(qualifiers[i]);
                leftTok->next()->column(column);
                tok = leftTok;
            }
        }
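The pattern in this hunk is to record the qualifier's column before tok->deleteNext() removes it, then stamp that column onto the token re-inserted at the front of the declaration. A minimal stand-alone sketch of the same record-then-restore idea (illustrative only; Tok below is a hypothetical stand-in, not cppcheck's real Token class):

#include <iostream>
#include <list>
#include <string>

struct Tok {                      // hypothetical stand-in for cppcheck's Token
    std::string str;
    int column;
};

int main() {
    // "int static x ;" with 1-based source columns 1, 5, 12, 14
    std::list<Tok> toks{{"int", 1}, {"static", 5}, {"x", 12}, {";", 14}};

    // remember the qualifier's column before removing it ...
    auto it = std::next(toks.begin());            // points at "static"
    const int column = it->column;
    toks.erase(it);

    // ... and restore it on the token re-inserted at the left-most position
    toks.push_front({"static", column});

    for (const auto& t : toks)
        std::cout << t.str << " (col " << t.column << ")\n";
}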
@@ -9222,15 +9226,18 @@ void Tokenizer::simplifyStructDecl()
            while (!Token::Match(start, "struct|class|union|enum")) {
                after->insertToken(start->str());
                after = after->next();
                after->column(start->column());
                start->deleteThis();
            }
            tok = start;
            if (!after)
                break; // see #4869 segmentation fault in Tokenizer::simplifyStructDecl (invalid code)
            after->insertToken(type->str());
            after->next()->column(type->column());
            if (start->str() != "class") {
                after->insertToken(start->str());
                after = after->next();
                after->column(start->column());
            }

            after = after->tokAt(2);
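This hunk runs where a combined type-plus-variable declaration is split in two, and the new column(...) calls copy the original source columns onto the re-emitted tokens. Roughly, per my reading of the hunk (the Anonymous0 spelling matches the new simplifyEnum1 test below):

// Before the simplification (what the user wrote):
//     static enum { A, B } ab;

// After simplifyStructDecl the declaration is split in two; the added
// column(...) calls keep the re-emitted "static"/"enum"/type-name tokens
// at their original source columns instead of the insertion position.
enum Anonymous0 { A, B };
static enum Anonymous0 ab;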
test/testtokenize.cpp (26 additions, 0 deletions)
@@ -506,6 +506,10 @@ class TestTokenizer : public TestFixture {
        TEST_CASE(dumpFallthrough);

        TEST_CASE(simplifyRedundantParentheses);

        TEST_CASE(simplifyEnum1);

        TEST_CASE(simplifyEnum2);
    }

class TokenizerTest : public Tokenizer
@@ -8810,6 +8814,28 @@ class TestTokenizer : public TestFixture {
        SimpleTokenizer tokenizer(settingsDefault, *this, false);
        ASSERT_NO_THROW(tokenizer.tokenize(code));
    }

    void simplifyEnum1() {
        const char code[] = "static enum {A,B} ab;";
        ASSERT_EQUALS("enum Anonymous0 { A , B } ; static enum Anonymous0 ab ;", tokenizeAndStringify(code));
        SimpleTokenizer tokenizer(settingsDefault, *this);
        tokenizer.tokenize(code);
        const Token* tok = Token::findsimplematch(tokenizer.tokens(), "static");
        ASSERT(tok);
        ASSERT_EQUALS(tok->column(), 1);
        ASSERT_EQUALS(tok->next()->column(), 8);
    }

    void simplifyEnum2() {
        const char code[] = "enum AB {A,B}; enum AB static ab; ";
        ASSERT_EQUALS("enum AB { A , B } ; static enum AB ab ;", tokenizeAndStringify(code));
        SimpleTokenizer tokenizer(settingsDefault, *this);
        tokenizer.tokenize(code);
        const Token* tok = Token::findsimplematch(tokenizer.tokens(), "static");
        ASSERT(tok);
        ASSERT_EQUALS(tok->column(), 24);
        ASSERT_EQUALS(tok->next()->column(), 16);
    }
};

REGISTER_TEST(TestTokenizer)
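The column values asserted above (1, 8, 24, 16) are simply the 1-based columns of the corresponding keywords in the original test strings. A quick stand-alone check of that arithmetic (illustrative only, not part of the test suite):

#include <cassert>
#include <string>

// 1-based column of the first occurrence of `word` at or after index `from`
static int columnOf(const std::string& line, const std::string& word, std::size_t from = 0) {
    return static_cast<int>(line.find(word, from)) + 1;
}

int main() {
    assert(columnOf("static enum {A,B} ab;", "static") == 1);
    assert(columnOf("static enum {A,B} ab;", "enum") == 8);
    assert(columnOf("enum AB {A,B}; enum AB static ab; ", "static") == 24);
    assert(columnOf("enum AB {A,B}; enum AB static ab; ", "enum", 1) == 16);  // second "enum"
}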