cpp/ast.py: 26 changes (18 additions, 8 deletions)
@@ -718,9 +718,11 @@ def generate(self):
             self.current_token = token
 
             # Dispatch on the next token type.
-            if token.token_type == _INTERNAL_TOKEN:
-                if token.name == _NAMESPACE_POP:
-                    self.namespace_stack.pop()
+            if (
+                token.token_type == _INTERNAL_TOKEN and
+                token.name == _NAMESPACE_POP
+            ):
+                self.namespace_stack.pop()
                 continue
 
             try:
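The collapsed condition changes behavior, not just style: internal tokens other than a namespace pop no longer short-circuit the loop. A minimal toy model of the new dispatch, where the `Token` shape and the two constants are stand-ins for cpp/ast.py's internals, not its real classes:

```python
import collections

Token = collections.namedtuple('Token', 'token_type name')
_INTERNAL_TOKEN = 'internal'
_NAMESPACE_POP = 'namespace-pop'

def dispatch(tokens):
    namespace_stack = ['NS']
    for token in tokens:
        # Only an internal namespace-pop token unwinds the stack and is
        # swallowed; any other token falls through to normal handling.
        if (token.token_type == _INTERNAL_TOKEN and
                token.name == _NAMESPACE_POP):
            namespace_stack.pop()
            continue
        yield token

# The '}' now kept in the stream (see handle_namespace below) reaches the
# normal handlers; the pop marker that follows it is consumed silently.
stream = [Token('syntax', '}'), Token(_INTERNAL_TOKEN, _NAMESPACE_POP)]
assert [t.name for t in dispatch(stream)] == ['}']
```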
@@ -767,7 +769,10 @@ def _generate_one(self, token):
         # Handle data or function declaration/definition.
         syntax = tokenize.SYNTAX
         temp_tokens, last_token = \
-            self._get_var_tokens_up_to(syntax, '(', ';', '{', '[')
+            self._get_var_tokens_up_to(syntax, '(', ';', '{', '[', '}')
+        if last_token.name == '}':
+            return None
+
         temp_tokens.insert(0, token)
         if last_token.name == '(':
             # If there is an assignment before the paren,
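Why '}' joins the terminator set: a macro invocation that is the last thing in a namespace body is followed directly by the namespace's closing brace, so without this guard the declaration scan could run past the end of the scope. A sketch of the new early exit, as a hypothetical simplification rather than the real method:

```python
def generate_one_sketch(tokens):
    """Scan to the first declaration terminator; '}' means 'emit nothing'."""
    last_token = next(t for t in tokens if t in ('(', ';', '{', '[', '}'))
    if last_token == '}':
        return None  # The brace closes the enclosing scope, not a declaration.
    return 'declaration terminated by %r' % last_token

assert generate_one_sketch(['MyMacro', '}']) is None
assert generate_one_sketch(['int', 'x', ';']) == "declaration terminated by ';'"
```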
@@ -1071,6 +1076,10 @@ def _get_method(self, return_type_and_name, modifiers, templated_types,
             self._add_back_token(token)
             token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
 
+        if token.name == '}':
+            self._add_back_token(token)
+            token = tokenize.Token(tokenize.SYNTAX, ';', 0, 0)
+
         if token.token_type != tokenize.SYNTAX:
             raise ParseError(token)
         # Handle ctor initializers.
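This reuses the recovery idiom of the case just above it: a stray '}' that ends the enclosing scope is pushed back for the scope handler, and the method parse proceeds as if the declaration ended in ';'. A toy version, where `add_back` stands in for `self._add_back_token`:

```python
def normalize_terminator(token, add_back):
    """Treat a scope-closing '}' as a declaration-ending ';' (sketch)."""
    if token == '}':
        add_back(token)  # Re-queue the brace so the enclosing scope sees it.
        token = ';'
    return token

pushed_back = []
assert normalize_terminator('}', pushed_back.append) == ';'
assert pushed_back == ['}']
```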
@@ -1270,8 +1279,10 @@ def _handle_class_and_struct(self, class_type, class_str, visibility):
                               next_token.name == ';')
                variable = var_token
                if is_syntax and not is_variable:
-                    variable = next_token
-                    temp = self._get_next_token()
+                    temp = next_token
+                    while variable.token_type != tokenize.NAME:
+                        variable = temp
+                        temp = self._get_next_token()
                    if temp.token_type == tokenize.SYNTAX and temp.name == '(':
                        # Handle methods declared to return a class/struct.
                        t0 = name_tokens[0]
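The old code appears to have assumed the token right after the class body was the variable name; the new loop walks forward until it actually finds a NAME token. A runnable sketch of that loop, with `Token` again a stand-in for `tokenize.Token`:

```python
import collections

Token = collections.namedtuple('Token', 'token_type name')
NAME, SYNTAX = 'NAME', 'SYNTAX'

def find_variable(var_token, get_next_token):
    """Advance past syntax tokens (e.g. '*') until a NAME appears."""
    variable = var_token
    temp = get_next_token()
    while variable.token_type != NAME:
        variable = temp
        temp = get_next_token()
    return variable, temp

# For something like 'struct S *instance;' the '*' is not the variable;
# 'instance' is.
stream = iter([Token(NAME, 'instance'), Token(SYNTAX, ';')])
variable, temp = find_variable(Token(SYNTAX, '*'), lambda: next(stream))
assert (variable.name, temp.name) == ('instance', ';')
```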
@@ -1625,8 +1636,7 @@ def handle_namespace(self):
         else:
             assert token.name == '{', token
             tokens = list(self.get_scope())
-            # Replace the trailing } with the internal namespace pop token.
-            tokens[-1] = internal_token
+            tokens.append(internal_token)
             # Handle namespace with nothing in it.
             self._add_back_tokens(tokens)
             return None
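In token-stream terms: the old code overwrote the namespace's closing '}' with the pop marker, so downstream code never saw the brace; the new code keeps the brace and appends the marker after it. A tiny illustration with plain strings standing in for real tokens:

```python
internal_token = 'NAMESPACE_POP'            # stand-in for the internal token
scope = ['int', 'x', ';', '}']              # what get_scope() might return

old_stream = scope[:-1] + [internal_token]  # old: '}' replaced by the marker
new_stream = scope + [internal_token]       # new: '}' kept, marker appended

assert new_stream[-2:] == ['}', 'NAMESPACE_POP']
```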
cpp/find_warnings.py: 3 changes (1 addition, 2 deletions)
@@ -401,8 +401,7 @@ def _find_public_function_warnings(self, node, name, primary_header,
                 # If the primary.filename == header.filename, it probably
                 # indicates an error elsewhere. It sucks to mask it,
                 # but false positives are worse.
-                if (primary_header and
-                        primary_header.filename != header.filename):
+                if primary_header:
                     msg = ("expected to find '{}' in '{}', "
                            "but found in '{}'".format(name,
                                                       primary_header.filename,
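The guard change in isolation: the warning previously required a primary header whose filename differed from the one where the declaration was found; now any primary header triggers it, including the same-filename case the surrounding comment describes. Spelled out as predicates, with a hypothetical `Header` stand-in:

```python
import collections

Header = collections.namedtuple('Header', 'filename')

def warns_before(primary_header, header):
    return bool(primary_header) and primary_header.filename != header.filename

def warns_after(primary_header, header):
    return bool(primary_header)

same = Header('foo.h')
assert not warns_before(same, same) and warns_after(same, same)
```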
cpp/tokenize.py: 25 changes (19 additions, 6 deletions)
@@ -158,15 +158,22 @@ def get_tokens(source):
         elif source[i] == "'" and source[start:i] in _STR_PREFIXES:
             token_type = CONSTANT
             i = _get_string(source, i)
-        elif c == '/' and source[i + 1] == '/': # Find // comments.
+        elif c == '/' and source[i + 1] == '/':  # Find // comments.
             i = source.find('\n', i)
             if i == -1:  # Handle EOF.
                 i = end
             continue
-        elif c == '/' and source[i + 1] == '*': # Find /* comments. */
+        elif c == '/' and source[i + 1] == '*':  # Find /* comments. */
             i = source.find('*/', i) + 2
             continue
-        elif c in ':+-<>&!|*=': # : or :: (plus other chars).
+        elif c in '<>':  # Handle '<' and '>' tokens.
+            token_type = SYNTAX
+            i += 1
+            new_ch = source[i]
+            if new_ch == c:
+                i += 1
+                new_ch = source[i]
+            if new_ch == '=':
+                i += 1
+        elif c in ':+-&|=':  # Handle 'XX' and 'X=' tokens.
             token_type = SYNTAX
             i += 1
             new_ch = source[i]
@@ -176,7 +183,13 @@
                 i += 1
             elif new_ch == '=':
                 i += 1
-        elif c in '()[]{}~?^%;/.,': # Handle single char tokens.
+        elif c in '!*^%/':  # Handle 'X=' tokens.
+            token_type = SYNTAX
+            i += 1
+            new_ch = source[i]
+            if new_ch == '=':
+                i += 1
+        elif c in '()[]{}~?;.,':  # Handle single char tokens.
             token_type = SYNTAX
             i += 1
             if c == '.' and source[i].isdigit():
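The net effect of splitting the old catch-all operator set into three branches is support for compound operators it previously missed, notably '<<=' and '>>=' plus the 'X=' forms of '!*^%/'. A condensed standalone re-implementation sketch of the scanning logic (not the module's API; bounds checks added for safety):

```python
def scan_operator(source, i):
    """Return the index one past the operator starting at source[i]."""
    c = source[i]
    if c in '<>':        # '<', '<<', '<=', '<<=' (and the '>' forms)
        i += 1
        if i < len(source) and source[i] == c:
            i += 1
        if i < len(source) and source[i] == '=':
            i += 1
    elif c in ':+-&|=':  # doubled forms ('::', '++', ...) and 'X='
        i += 1           # (the real branch also special-cases '->')
        if i < len(source) and source[i] in (c, '='):
            i += 1
    elif c in '!*^%/':   # only the 'X=' forms: '!=', '*=', '^=', ...
        i += 1
        if i < len(source) and source[i] == '=':
            i += 1
    return i

for op in ('<<=', '>>=', '^=', '<<', '<=', '::', '!='):
    assert op[:scan_operator(op, 0)] == op, op
```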
test/macro3.h: 4 changes (3 additions, 1 deletion)
@@ -1,5 +1,7 @@
 MyMacro(NS, Name)
+
 namespace NS {
-
+MyMacro()
+MyMacro(NS)
 MyMacro(NS, Name)
 }
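These added lines exercise bare macro invocations at namespace scope, which is what the '}' terminator handling in cpp/ast.py above is for. An expectation sketch using the `MakeBuilder` test helper that appears in test_ast.py; the exact assertion is an assumption, not part of the patch:

```python
# Each macro line should now parse cleanly instead of raising ParseError; a
# macro followed directly by the namespace's closing brace yields nothing.
code = 'namespace NS {\nMyMacro(NS)\n}'
nodes = list(MakeBuilder(code).generate())  # MakeBuilder: test_ast.py helper
```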
test_ast.py: 2 changes (1 addition, 1 deletion)
@@ -661,7 +661,7 @@ def test_template_typedef(self):
 
     def test_operators(self):
         for operator in ('=', '+=', '-=', '*=', '==', '!=', '()', '[]', '<',
-                         '>'):
+                         '>', '^=', '<<=', '>>='):
             code = 'void Foo::operator%s();' % operator
             nodes = list(MakeBuilder(code).generate())
             self.assertEqual(1, len(nodes))
test_tokenize.py: 5 changes (3 additions, 2 deletions)
@@ -87,7 +87,7 @@ def testget_tokens_binary_operators(self):
         self.assertEqual(Constant('3', 4, 5), tokens[2])
 
     def testget_tokens_multi_char_binary_operators(self):
-        for operator in ('<<', '>>', '**'):
+        for operator in ('<<', '>>'):
             # 0123456
             tokens = self.get_tokens('5 %s 3' % operator)
             self.assertEqual(3, len(tokens), tokens)
@@ -119,7 +119,8 @@ def testget_tokens_logical_operators(self):
         self.assertEqual(Name('not', 1, 4), tokens[1])
 
     def testget_tokens_operators(self):
-        for operator in ('+=', '-=', '*=', '==', '!='):
+        for operator in ('+=', '-=', '*=', '==', '!=', '/=', '%=', '^=', '|=',
+                         '<<', '>>', '<=', '>='):
             # 0123456
             tokens = self.get_tokens('a %s b' % operator)
             self.assertEqual(3, len(tokens), tokens)