Merge remote-tracking branch 'origin/master' into v0.2.0
andialbrecht committed Apr 12, 2015
2 parents e038a06 + f775030 commit d463a75
Showing 6 changed files with 27 additions and 4 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -12,4 +12,5 @@ extras/appengine/lib/
 extras/py3k/sqlparse
 extras/py3k/tests
 extras/py3k/sqlparse.diff
-extras/py3k/tests.diff
\ No newline at end of file
+extras/py3k/tests.diff
+coverage.xml
2 changes: 2 additions & 0 deletions CHANGES
@@ -8,6 +8,7 @@ Bug Fixes
 * Fix parsing of multi-line comments (issue172, by JacekPliszka).
 * Fix parsing of escaped backslashes (issue174, by caseyching).
 * Fix parsing of identifiers starting with underscore (issue175).
+* Fix misinterpretation of IN keyword (issue183).

 Enhancements
 * Improve formatting of HAVING statements.
@@ -17,6 +18,7 @@ Enhancements
 * Add support for square bracket array indexing (issue170, issue176,
   issue177 by darikg).
 * Improve grouping of aliased elements (issue167, by darikg).
+* Support comments starting with '#' character (issue178).


 Release 0.1.14 (Nov 30, 2014)
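Both of the new CHANGES entries are visible from the public API. A quick sketch against this commit, mirroring what the tests added below assert:

import sqlparse

# issue178: '#' now starts a single-line comment
stmt = sqlparse.parse('select 1 # trailing comment')[0]
print(stmt.tokens[-1].ttype)   # Token.Comment.Single

# issue183: IN followed by a parenthesis is a keyword, not a function
stmt = sqlparse.parse('in(1, 2)')[0]
print(stmt.tokens[0].ttype)    # Token.Keyword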
1 change: 1 addition & 0 deletions sqlparse/filters.py
@@ -347,6 +347,7 @@ def _next_token(i):
                 nl = self.nl()
                 added.add(nl)
                 tlist.insert_before(token, nl)
+                offset += 1
             token = _next_token(tlist.token_index(nl) + offset)

     def _split_statements(self, tlist):
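The one-line fix guards against an off-by-one: insert_before puts the newline into the token list ahead of the keyword, which shifts every later index by one. A minimal sketch of the idea, with a plain Python list standing in for the token list (names here are illustrative, not sqlparse's real API):

# Illustrative sketch only: a plain list stands in for tlist.
tokens = ['SELECT', ' ', 'a', ' ', 'FROM', ' ', 't']

kw = tokens.index('FROM')
tokens.insert(kw, '\n')   # roughly what tlist.insert_before(token, nl) does
offset = 1                # skip past the keyword itself...
offset += 1               # ...and past the newline we just inserted (the fix)

resume = tokens.index('\n') + offset
assert tokens[resume:] == [' ', 't']   # scan resumes *after* 'FROM'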
7 changes: 5 additions & 2 deletions sqlparse/lexer.py
@@ -141,10 +141,10 @@ class Lexer(compat.with_metaclass(LexerMeta)):

     tokens = {
         'root': [
-            (r'--.*?(\r\n|\r|\n)', tokens.Comment.Single),
+            (r'(--|#).*?(\r\n|\r|\n)', tokens.Comment.Single),
             # $ matches *before* newline, therefore we have two patterns
             # to match Comment.Single
-            (r'--.*?$', tokens.Comment.Single),
+            (r'(--|#).*?$', tokens.Comment.Single),
             (r'(\r\n|\r|\n)', tokens.Newline),
             (r'\s+', tokens.Whitespace),
             (r'/\*', tokens.Comment.Multiline, 'multiline-comments'),
@@ -163,6 +163,9 @@ class Lexer(compat.with_metaclass(LexerMeta)):
             # see https://github.com/andialbrecht/sqlparse/pull/64
             (r'VALUES', tokens.Keyword),
             (r'@[^\W\d_]\w+', tokens.Name),
+            # IN is special, it may be followed by a parenthesis, but
+            # is never a function, see issue183
+            (r'in\b(?=[ (])?', tokens.Keyword),
             (r'[^\W\d_]\w*(?=[.(])', tokens.Name),  # see issue39
             (r'[-]?0x[0-9a-fA-F]+', tokens.Number.Hexadecimal),
             (r'[-]?[0-9]*(\.[0-9]+)?[eE][-]?[0-9]+', tokens.Number.Float),
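A quick way to sanity-check the changed patterns in isolation (a sketch: the patterns are compiled standalone here, whereas the real Lexer applies its own flags, assumed to include re.IGNORECASE):

import re

comment_nl = re.compile(r'(--|#).*?(\r\n|\r|\n)')
comment_eol = re.compile(r'(--|#).*?$')
in_kw = re.compile(r'in\b(?=[ (])?', re.IGNORECASE)

assert comment_nl.match('# hash comment\n')
assert comment_nl.match('-- dash comment\n')
assert comment_eol.match('# no trailing newline')

# 'in(' is claimed as a keyword before the Name rule below it can
# treat it as a function name (issue183):
assert in_kw.match('in(1, 2)').group(0) == 'in'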
8 changes: 7 additions & 1 deletion tests/test_grouping.py
@@ -208,6 +208,12 @@ def test_function(self):
         self.assert_(isinstance(p.tokens[0], sql.Function))
         self.assertEqual(len(list(p.tokens[0].get_parameters())), 2)

+    def test_function_not_in(self):  # issue183
+        p = sqlparse.parse('in(1, 2)')[0]
+        self.assertEqual(len(p.tokens), 2)
+        self.assertEqual(p.tokens[0].ttype, T.Keyword)
+        self.assert_(isinstance(p.tokens[1], sql.Parenthesis))
+
     def test_varchar(self):
         p = sqlparse.parse('"text" Varchar(50) NOT NULL')[0]
         self.assert_(isinstance(p.tokens[2], sql.Function))
@@ -386,4 +392,4 @@ def test_aliased_function_without_as():
 def test_aliased_literal_without_as():
     p = sqlparse.parse('1 foo')[0].tokens
     assert len(p) == 1
-    assert p[0].get_alias() == 'foo'
\ No newline at end of file
+    assert p[0].get_alias() == 'foo'
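What the new test_function_not_in case pins down, seen from the public API (a sketch; 'foo(1, 2)' is only a stand-in ordinary call for contrast):

import sqlparse
from sqlparse import sql

func = sqlparse.parse('foo(1, 2)')[0].tokens[0]
not_func = sqlparse.parse('in(1, 2)')[0].tokens[0]

print(isinstance(func, sql.Function))      # True: ordinary call groups as Function
print(isinstance(not_func, sql.Function))  # False: IN stays a plain keyword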
10 changes: 10 additions & 0 deletions tests/test_parse.py
@@ -284,3 +284,13 @@ def test_typed_array_definition():
     assert names == ['x', 'y', 'z']


+@pytest.mark.parametrize('sql', [
+    'select 1 -- foo',
+    'select 1 # foo'  # see issue178
+])
+def test_single_line_comments(sql):
+    p = sqlparse.parse(sql)[0]
+    assert len(p.tokens) == 5
+    assert p.tokens[-1].ttype == T.Comment.Single
+
+
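For the '#' variant, the five tokens the test counts break down as below (a sketch; the commented output is the expected shape, with ttype names abbreviated from memory rather than captured output):

import sqlparse

p = sqlparse.parse('select 1 # foo')[0]
for tok in p.tokens:
    print(tok.ttype, repr(tok.value))
# Expected, roughly:
#   Token.Keyword.DML             'select'
#   Token.Text.Whitespace         ' '
#   Token.Literal.Number.Integer  '1'
#   Token.Text.Whitespace         ' '
#   Token.Comment.Single          '# foo'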