
Commit 7a0804e

fix #13312: internalAstError for increment after label in lambda (danmar#7309)
1 parent: 3a2bf21

File tree

lib/tokenlist.cpp
test/testtokenize.cpp

2 files changed: +16 -1 lines changed

lib/tokenlist.cpp

Lines changed: 1 addition & 1 deletion
@@ -1803,7 +1803,7 @@ static Token * createAstAtToken(Token *tok)
         !tok->previous() ||
         Token::Match(tok, "%name% %op%|(|[|.|::|<|?|;") ||
         (cpp && Token::Match(tok, "%name% {") && iscpp11init(tok->next())) ||
-        Token::Match(tok->previous(), "[;{}] %cop%|++|--|( !!{") ||
+        Token::Match(tok->previous(), "[;{}:] %cop%|++|--|( !!{") ||
         Token::Match(tok->previous(), "[;{}] %num%|%str%|%char%") ||
         Token::Match(tok->previous(), "[;{}] delete new") ||
         (cpp && Token::Match(tok->previous(), "[;{}] ["))) {
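
For context (not part of the diff): before this change the start-of-expression pattern only accepted `;`, `{` or `}` as the token before a prefix/postfix operator, so an increment that directly follows a label colon inside a lambda body never got an AST, which surfaced as internalAstError. A minimal reproducer, reduced from the regression test below (the file name is hypothetical; the for_each/lambda shape comes from the ticket), is:

// label_in_lambda.cpp -- hypothetical file name, reduced from the test case below.
// Before this fix, the "++a;" right after "default:" inside the lambda body
// was not turned into an AST and cppcheck reported an internalAstError.
#include <algorithm>
#include <vector>

void f(const std::vector<int>& v, int a) {
    std::for_each(v.begin(), v.end(), [&](int i) {
        switch (i) {
        default:
            ++a; // the expression starts directly after the label colon
        }
    });
}

With `:` added to the leading character class, Token::Match(tok->previous(), "[;{}:] %cop%|++|--|( !!{") also fires when the previous token is a label colon, so the ++a expression gets an AST like any other statement.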

test/testtokenize.cpp

Lines changed: 15 additions & 0 deletions
@@ -469,6 +469,8 @@ class TestTokenizer : public TestFixture {
         TEST_CASE(funcnameInParenthesis3); // #13585
 
         TEST_CASE(genericInIf); // #13561
+
+        TEST_CASE(preincrementInLambda); // #13312
     }
 
 #define tokenizeAndStringify(...) tokenizeAndStringify_(__FILE__, __LINE__, __VA_ARGS__)
@@ -8416,6 +8418,19 @@ class TestTokenizer : public TestFixture {
         const char ast[] = "(( if (( _Generic (, (, (, (, s 1) (? a (: (, b c) d))) 3) 0)))";
         ASSERT_EQUALS(ast, testAst(code, AstStyle::Z3));
     }
+
+    void preincrementInLambda() { // #13312
+        const char code[] =
+            "void f(const std::vector<int>& v, int a) {\n"
+            "    std::for_each(v.begin(), v.end(), [&](int i) {\n"
+            "        switch (i) {\n"
+            "        default:\n"
+            "            ++a;\n"
+            "        }\n"
+            "    });\n"
+            "}\n";
+        ASSERT_NO_THROW(tokenizeAndStringify(code));
+    }
 };
 
 REGISTER_TEST(TestTokenizer)
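
As a side note (my own sketch, not part of this commit): the widened pattern should also cover a named goto-style label inside a lambda, since the token before the increment is again a plain `:`. A hypothetical extra regression test in the same style, assuming it would also be registered with a TEST_CASE line in run(), could look like this:

    // Hypothetical additional test, not in this commit: a named label before
    // the increment exercises the same "[;{}:]" branch in createAstAtToken.
    void preincrementAfterNamedLabelInLambda() { // follow-up to #13312
        const char code[] =
            "void g(int n) {\n"
            "    auto h = [&]() {\n"
            "    retry:\n"
            "        ++n;\n"
            "        if (n < 3)\n"
            "            goto retry;\n"
            "    };\n"
            "    h();\n"
            "}\n";
        ASSERT_NO_THROW(tokenizeAndStringify(code));
    }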
