diff --git a/lib/tokenize.cpp b/lib/tokenize.cpp
index 80abb4cb2bb..96c69bb4dd9 100644
--- a/lib/tokenize.cpp
+++ b/lib/tokenize.cpp
@@ -8837,6 +8837,8 @@ void Tokenizer::findGarbageCode() const
             if (tok->str() == ";") { // do the counting
                 semicolons++;
             } else if (tok->str() == ":") {
+                if (tok->strAt(-1) == ",")
+                    syntaxError(tok);
                 colons++;
             } else if (tok->str() == ")") { // skip pairs of ( )
                 tok = tok->link();
@@ -8960,8 +8962,6 @@ void Tokenizer::findGarbageCode() const
             syntaxError(tok);
         if (Token::Match(tok, ": [)]=]"))
             syntaxError(tok);
-        if (Token::simpleMatch(tok, ", :"))
-            syntaxError(tok);
         if (Token::Match(tok, "typedef [,;:]"))
             syntaxError(tok);
         if (Token::Match(tok, "? %assign%"))
diff --git a/test/testtokenize.cpp b/test/testtokenize.cpp
index 70b78a44250..dcbbd0201af 100644
--- a/test/testtokenize.cpp
+++ b/test/testtokenize.cpp
@@ -7681,6 +7681,8 @@ class TestTokenizer : public TestFixture {
 
         ASSERT_NO_THROW(tokenizeAndStringify("enum { E = sizeof(struct { int i; }) };")); // #13249
 
+        ASSERT_NO_THROW(tokenizeAndStringify("struct S { unsigned u:2, :30; };")); // #14393
+
         ignore_errout();
     }
 