diff --git a/lib/tokenlist.cpp b/lib/tokenlist.cpp
index 42b54b0c042..d4c2c59aee1 100644
--- a/lib/tokenlist.cpp
+++ b/lib/tokenlist.cpp
@@ -1803,7 +1803,7 @@ static Token * createAstAtToken(Token *tok)
         !tok->previous() ||
         Token::Match(tok, "%name% %op%|(|[|.|::|<|?|;") ||
         (cpp && Token::Match(tok, "%name% {") && iscpp11init(tok->next())) ||
-        Token::Match(tok->previous(), "[;{}] %cop%|++|--|( !!{") ||
+        Token::Match(tok->previous(), "[;{}:] %cop%|++|--|( !!{") ||
         Token::Match(tok->previous(), "[;{}] %num%|%str%|%char%") ||
         Token::Match(tok->previous(), "[;{}] delete new") ||
         (cpp && Token::Match(tok->previous(), "[;{}] ["))) {
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index fc67ea64b4f..e33c42a820e 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -469,6 +469,8 @@ class TestTokenizer : public TestFixture {

         TEST_CASE(funcnameInParenthesis3); // #13585
         TEST_CASE(genericInIf); // #13561
+
+        TEST_CASE(preincrementInLambda); // #13312
     }

 #define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__)
@@ -8416,6 +8418,19 @@ class TestTokenizer : public TestFixture {
         const char ast[] = "(( if (( _Generic (, (, (, (, s 1) (? a (: (, b c) d))) 3) 0)))";
         ASSERT_EQUALS(ast, testAst(code, AstStyle::Z3));
     }
+
+    void preincrementInLambda() { // #13312
+        const char code[] =
+            "void f(const std::vector& v, int a) {\n"
+            "    std::for_each(v.begin(), v.end(), [&](int i) {\n"
+            "        switch (i) {\n"
+            "        default:\n"
+            "            ++a;\n"
+            "        }\n"
+            "    });\n"
+            "}\n";
+        ASSERT_NO_THROW(tokenizeAndStringify(code));
+    }
 };

 REGISTER_TEST(TestTokenizer)