Parser optimization: compare Id instances by object identity.
Id.__eq__ was near the top of the profile, so we don't want it to be
implemented in Python.

Also:

- Count generated code correctly now that we've moved things.
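The optimization rests on interning: every Id value is constructed exactly once and looked up through IdInstance(), so Python's default object-identity comparison is both correct and fast, and the Python-level __eq__/__ne__/__hash__ can be deleted. The diff below shows the _ID_INSTANCES dict and the callers, but not IdInstance() itself; the following is a minimal sketch of one plausible shape for it, not the repository's actual implementation.

    class Id(object):
        def __init__(self, enum_value):
            self.enum_value = enum_value

    _ID_INSTANCES = {}  # int -> Id

    def IdInstance(enum_value):
        # Return the single canonical Id for this integer, creating it on first use.
        instance = _ID_INSTANCES.get(enum_value)
        if instance is None:
            instance = Id(enum_value)
            _ID_INSTANCES[enum_value] = instance
        return instance

    if __name__ == '__main__':
        left = IdInstance(198)
        right = IdInstance(198)
        assert left is right   # same interned object
        assert left == right   # default == is identity; no Python-level __eq__ dispatch

With this pattern, equality checks in the hot parsing path never enter interpreted Python code, which is why Id.__eq__ drops out of the profile.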
Andy Chu committed Dec 20, 2017
1 parent 2882a57 commit 18a17ed
Showing 4 changed files with 10 additions and 15 deletions.
15 changes: 5 additions & 10 deletions core/id_kind.py
@@ -24,7 +24,11 @@ def IdName(id_):
return _ID_NAMES[id_.enum_value]


# Save memory by keeping one instance.

# Keep one instance of each Id, to save memory and enable comparison by
# OBJECT IDENTITY.
# Do NOT create any more instances of them! Always use IdInstance().

# TODO: Fold this into ASDL, which will enforce this?

_ID_INSTANCES = {} # int -> Id
@@ -44,15 +48,6 @@ class Id(object):
def __init__(self, enum_value):
self.enum_value = enum_value

def __eq__(self, other):
return self.enum_value == other.enum_value

def __ne__(self, other):
return self.enum_value != other.enum_value

def __hash__(self):
return hash(self.enum_value)

def __repr__(self):
return IdName(self)

5 changes: 2 additions & 3 deletions core/id_kind_test.py
@@ -61,9 +61,8 @@ def testTokens(self):
self.assertEqual(Kind.BoolBinary, LookupKind(t.id))

def testEquality(self):
# OK WTF!!!!
left = Id(198)
right = Id(198)
left = id_kind.IdInstance(198)
right = id_kind.IdInstance(198)
print(left, right)
print(left == right)
self.assertEqual(left, right)
3 changes: 2 additions & 1 deletion native/fastlex_test.py
@@ -11,6 +11,7 @@

import unittest

from core import id_kind
from core.id_kind import Id
from osh import ast_ as ast

@@ -21,7 +22,7 @@

def MatchToken(lex_mode, line, start_pos):
tok_type, end_pos = fastlex.MatchToken(lex_mode.enum_id, line, start_pos)
return Id(tok_type), end_pos
return id_kind.IdInstance(tok_type), end_pos


def TokenizeLineOuter(line):
2 changes: 1 addition & 1 deletion scripts/count.sh
@@ -49,7 +49,7 @@ all() {
echo

echo 'GENERATED CODE'
wc -l _devbuild/*.py _devbuild/gen/* | sort --numeric
wc -l _devbuild/gen/*.{py,h} | sort --numeric
echo

echo 'TOOLS'
