/
gdllex.py
67 lines (55 loc) · 971 Bytes
/
gdllex.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
from ply import lex
# Token names for the GDL (Game Description Language) lexer.
# PLY's lex module reads this module-level ``tokens`` tuple together with
# the ``t_*`` rules below when building the lexer, so these names and the
# rule identifiers must stay exactly as written.
tokens = (
'VAR',
'NAME',
'NUM',
# 'RELATION',
'ARROW',
'OR',
'NOT',
# 'DISTINCT',
'RPAREN',
'LPAREN',
'COMMENT',
)
# Simple string rules: PLY uses each regex verbatim for the like-named token.
t_VAR = r'\?[a-zA-Z_][-\w_]*'  # GDL variables start with '?', e.g. ?x
t_ARROW = r'<='                # rule-implication operator in KIF/GDL
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_ignore = ' \t'               # characters skipped between tokens
def t_NAME(t):
    # NOTE: in a PLY function rule the first string below *is* the token
    # regex, not a docstring in the usual sense.
    # Fixes vs. the original '([-\w_]+)|<|>|>=|...':
    #   * '>=' is tried before '>' — regex alternation is first-match, so in
    #     the old order '>' always won and '>=' could never be produced.
    #   * '<(?!=)' refuses to match the start of '<=', letting PLY's master
    #     regex fall through to the t_ARROW string rule for '<='.
    r'([-\w_]+)|>=|<(?!=)|>|\+|\-|\*|/'
    try:
        # Numeric literals: reclassify the token as NUM with an int value.
        t.value = int(t.value)
        t.type = 'NUM'
        return t
    except ValueError:
        # Not a number: the keywords 'or'/'not' (case-insensitive) get their
        # own token types; everything else is a plain NAME.
        if t.value.lower() == 'or':
            t.type = 'OR'
        elif t.value.lower() == 'not':
            t.type = 'NOT'
        # elif t.value.lower() == 'distinct':
        #     t.type = 'DISTINCT'
        # elif t.value in relations:
        #     t.type = 'RELATION'
        else:
            t.type = 'NAME'
        return t
def t_COMMENT(t):
    r';.*'
    # Returning None (falling off the end) tells PLY to discard the token,
    # so ';'-to-end-of-line comments never reach the parser.
    pass
def t_newline(t):
    r'\n+'
    # The rule matches nothing but newline characters, so the length of the
    # matched text is exactly the number of lines consumed; keep PLY's
    # line counter in sync for error reporting.
    t.lexer.lineno += len(t.value)
def t_error(t):
    # Report a character no rule matched, then resume lexing after it.
    # print() call form behaves identically for a single argument under
    # both Python 2 and Python 3 (the original used the Py2-only statement).
    print("Illegal character '%s'" % t.value[0])
    # Modern PLY exposes skip() on the lexer, not on the token object;
    # the original t.skip(1) raises AttributeError on current PLY releases.
    t.lexer.skip(1)
# Build the lexer: PLY inspects this module's globals for the ``tokens``
# tuple and the ``t_*`` rules defined above.
lex.lex()

# Feed the sample GDL/KIF file to the lexer.  A context manager guarantees
# the file handle is closed (the original open(...).read() leaked it).
with open('parse_test.kif') as kif:
    gdl = kif.read()
lex.input(gdl)

# Example of draining the token stream:
#     while True:
#         tok = lex.token()
#         if not tok:
#             break
#         print(tok)