Skip to content

You can clone this repository with HTTPS or check it out with Subversion.

Download ZIP
Browse files

merge

--HG--
rename : requires.txt => requirements.txt
Branch information for this commit:
commit d1c6721dba441361c3adbcea5172dc0416e9b6ba 2 parents f955c70 + 467799b
@vlasovskikh vlasovskikh authored
View
2  .hgignore
@@ -2,11 +2,13 @@ syntax:glob
MANIFEST
build
dist
+tags
tmp
tags
.idea
funcparserlib.egg-info
examples/dot/*.png
+.idea
*.pyc
*.swp
View
1  .hgtags
@@ -5,3 +5,4 @@ e5f9249d65bc6c9f8a5d219e31c3abd30167588a 0.3.1
23cf9ed9635b112fcb2247d960f4a918eaaad4bc 0.3.2
dfcce3e0b5765a393489570a41c7c95f5d8f1b15 0.3.3
ce8269e2e9ad9795f089b397efbec33758337016 0.3.4
+2a094772c9e97ee5de5618f9c4479253fb357e4e 0.3.5
View
11 CHANGES
@@ -3,6 +3,17 @@ The Changelog
This is a changelog of [funcparserlib][1].
+
+0.3.5, 2011-01-13
+-----------------
+
+A maintenance release.
+
+* Python 2.4 compatibility
+* More readable terminal names for error reporting
+* Fixed wrong token positions in lexer error messages
+
+
0.3.4, 2009-10-06
-----------------
View
2  examples/json/Makefile
@@ -1,4 +1,4 @@
-.PHONY = test
+all: test
test:
nosetests -v
View
3  src/funcparserlib/parser.py
@@ -126,7 +126,8 @@ def parse(self, tokens):
max = e.state.max
tok = tokens[max] if max < len(tokens) else 'eof'
raise ParserError(u'%s: %s' % (e.msg, tok),
- getattr(tok, 'pos', None))
+ getattr(tok, 'pos', None),
+ max)
def __call__(self, tokens, s):
return GrammarError('an abstract parser cannot be called')
View
13 src/funcparserlib/util.py
@@ -23,8 +23,8 @@
class SyntaxError(Exception):
'The base class for funcparserlib errors.'
- def __init__(self, msg, pos=None):
- Exception.__init__(self, msg, pos)
+ def __init__(self, msg, pos, index=None):
+ Exception.__init__(self, msg, pos, index)
@property
def pos(self):
@@ -52,9 +52,12 @@ def rec(x, indent, sym):
if len(xs) == 0:
return line
else:
- next_indent = indent + (
- CONT if sym == MID
- else (ROOT if sym == ROOT else LAST))
+ if sym == MID:
+ next_indent = indent + (CONT)
+ elif sym == ROOT:
+ next_indent = indent + (ROOT)
+ else:
+ next_indent = indent + (LAST)
syms = [MID] * (len(xs) - 1) + [END]
lines = [rec(x, next_indent, sym) for x, sym in zip(xs, syms)]
return '\n'.join([line] + lines)
View
51 tests/test_parsing.py
@@ -1,5 +1,9 @@
-from nose.tools import eq_
-from funcparserlib.parser import a, many
+# -*- coding: utf-8 -*-
+
+import re
+from nose.tools import eq_, ok_
+from funcparserlib.lexer import make_tokenizer, Spec, LexerError, Token
+from funcparserlib.parser import a, many, tok, skip, eof, ParserError
# Issue 31
def test_many_backtracking():
@@ -8,3 +12,46 @@ def test_many_backtracking():
expr = many(x + y) + x + x
eq_(expr.parse('xyxyxx'), ([('x', 'y'), ('x', 'y')], 'x', 'x'))
+# Issue 14
+def test_error_info():
+ tokenize = make_tokenizer([
+ Spec('keyword', r'(is|end)'),
+ Spec('id', r'[a-z]+'),
+ Spec('space', r'[ \t]+'),
+ Spec('nl', r'[\n\r]+'),
+ ])
+ try:
+ list(tokenize(u'f is ф'))
+ except LexerError, e:
+ eq_(unicode(e), u'1,6-1,6: cannot tokenize data: "f is \u0444"')
+ else:
+ ok_(False, 'must raise LexerError')
+
+ keyword = lambda s: tok('keyword', s)
+
+ id = tok('id')
+ is_ = keyword('is')
+ end = keyword('end')
+ nl = tok('nl')
+
+ equality = id + skip(is_) + id >> tuple
+ expr = equality + skip(nl)
+ file = many(expr) + end
+
+ msg = """\
+spam is eggs
+eggs isnt spam
+end"""
+ toks = [x for x in tokenize(msg) if x.type != 'space']
+ try:
+ file.parse(toks)
+ except ParserError, e:
+ msg, pos, i = e.args
+ eq_(msg, u"got unexpected token: id 'spam'")
+ eq_(pos, ((2, 11), (2, 14)))
+ # May raise KeyError
+ t = toks[i]
+ eq_(t, Token('id', 'spam'))
+ else:
+ ok_(False, 'must raise ParserError')
+
Please sign in to comment.
Something went wrong with that request. Please try again.