Merge pull request #17 from r3/master
Freshly baked tests
Havvy committed Dec 3, 2012
2 parents f1d10d2 + f097186 commit 8e1da1b
Showing 4 changed files with 94 additions and 50 deletions.
2 changes: 1 addition & 1 deletion examples/escape code.flux

@@ -1 +1 @@
-\Hi() @template(arg) and a %variable or %v\ ar\ iable
+\Hi() @template(arg) and a %variable
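A quick sanity check on this change: the surviving line is 36 characters long, matching the offsets asserted in the updated tests below. A minimal sketch of the arithmetic in plain Python:

    # Verify the character offsets the new tests assert for this example.
    line = '\\Hi() @template(arg) and a %variable'  # file body, sans newline

    assert len(line) == 36                  # NEW_LINE then spans (36, 37)
    assert line.index(' and a ') == 20      # TEXT ' and a ' spans (20, 27)
    assert line.index('%variable') == 27    # VARIABLE spans (27, 36)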
6 changes: 4 additions & 2 deletions lexer.py

@@ -73,8 +73,10 @@ def lookup(iterable):
 def pprint_token_stream(stream):
     for token in stream:
         consumed = '(\\n)' if token.consumed == '\n' else token.consumed
-        print("{}:: {}".format(token.name(), consumed))
+        print("{}:: {} @ ({}, {})".format(token.name(),
+                                          consumed,
+                                          token.start,
+                                          token.end))

 if __name__ == '__main__':
     parser = argparse.ArgumentParser(description="""Description here""")
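The effect of this change: each token now prints with its character span alongside its name and consumed text. A hedged sketch of the new output, using a stand-in object rather than the project's real token classes:

    # Illustrative stand-in only; real tokens come from the FTL lexer.
    class StubToken:
        def __init__(self, consumed, start, end):
            self.consumed = consumed
            self.start = start
            self.end = end

        def name(self):
            return 'TEXT'

    token = StubToken('outer', 1, 6)
    consumed = '(\\n)' if token.consumed == '\n' else token.consumed
    print("{}:: {} @ ({}, {})".format(token.name(), consumed,
                                      token.start, token.end))
    # prints: TEXT:: outer @ (1, 6)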
119 changes: 73 additions & 46 deletions tests/test_Lexer.py

@@ -2,9 +2,8 @@
 import pytest
 import os

-from FTL.tokens import *
-from FTL.Lexer import *
-from FTL import Lexer
+from FTL import lexer
+from FTL import tokens

 # Example files
 examples = '../examples'
@@ -13,46 +12,91 @@
 # for a number of example files
 test_result_pairs = (
     (os.path.join(examples, 'complex function.flux'),
-     [AT(consumed='@'), TEXT(consumed='outer'), OPEN_PAREN(consumed='('),
-      AT(consumed='@'), TEXT(consumed='inner'), OPEN_PAREN(consumed='('),
-      TEXT(consumed='"anonymous quoted parameter"'), CLOSE_PAREN(consumed=')'),
-      COMMA(consumed=','), TEXT(consumed=' name'), EQUALS(consumed='='),
-      TEXT(consumed='value'), CLOSE_PAREN(consumed=')')]),
+     [tokens.AT(consumed='@', start=0, end=1),
+      tokens.TEXT(consumed='outer', start=1, end=6),
+      tokens.OPEN_PAREN(consumed='(', start=6, end=7),
+      tokens.AT(consumed='@', start=7, end=8),
+      tokens.TEXT(consumed='inner', start=8, end=13),
+      tokens.OPEN_PAREN(consumed='(', start=13, end=14),
+      tokens.TEXT(consumed='"anonymous quoted parameter"', start=14, end=42),
+      tokens.CLOSE_PAREN(consumed=')', start=42, end=43),
+      tokens.COMMA(consumed=',', start=43, end=44),
+      tokens.TEXT(consumed=' name', start=44, end=49),
+      tokens.EQUALS(consumed='=', start=49, end=50),
+      tokens.TEXT(consumed='value', start=50, end=55),
+      tokens.CLOSE_PAREN(consumed=')', start=55, end=56)]),

     (os.path.join(examples, 'escape code.flux'),
-     [ESCAPED(consumed='\\H'), TEXT(consumed='i'), OPEN_PAREN(consumed='('),
-      CLOSE_PAREN(consumed=')'), TEXT(consumed=' '), AT(consumed='@'),
-      TEXT(consumed='template'), OPEN_PAREN(consumed='('),
-      TEXT(consumed='arg'), CLOSE_PAREN(consumed=')'),
-      TEXT(consumed=' and a '), VARIABLE(consumed='%variable'),
-      TEXT(consumed=' or '), VARIABLE(consumed='%v\\ ar\\ iable')]),
+     [tokens.ESCAPED(consumed='\\H', start=0, end=2),
+      tokens.TEXT(consumed='i', start=2, end=3),
+      tokens.OPEN_PAREN(consumed='(', start=3, end=4),
+      tokens.CLOSE_PAREN(consumed=')', start=4, end=5),
+      tokens.TEXT(consumed=' ', start=5, end=6),
+      tokens.AT(consumed='@', start=6, end=7),
+      tokens.TEXT(consumed='template', start=7, end=15),
+      tokens.OPEN_PAREN(consumed='(', start=15, end=16),
+      tokens.TEXT(consumed='arg', start=16, end=19),
+      tokens.CLOSE_PAREN(consumed=')', start=19, end=20),
+      tokens.TEXT(consumed=' and a ', start=20, end=27),
+      tokens.VARIABLE(consumed='%variable', start=27, end=36),
+      tokens.NEW_LINE(consumed='\n', start=36, end=37)]),

     (os.path.join(examples, 'function.flux'),
-     [AT(consumed='@'), TEXT(consumed='function'),
-      OPEN_PAREN(consumed='('), CLOSE_PAREN(consumed=')')]),
+     [tokens.AT(consumed='@', start=0, end=1),
+      tokens.TEXT(consumed='function', start=1, end=9),
+      tokens.OPEN_PAREN(consumed='(', start=9, end=10),
+      tokens.CLOSE_PAREN(consumed=')', start=10, end=11)]),

     (os.path.join(examples, 'function with arguments.flux'),
-     [AT(consumed='@'), TEXT(consumed='template'), OPEN_PAREN(consumed='('),
-      TEXT(consumed='testamajin'), COMMA(consumed=','),
-      TEXT(consumed=' argument1 '), EQUALS(consumed='='), TEXT(consumed=' 23'),
-      CLOSE_PAREN(consumed=')'), NEW_LINE(consumed='\n')]),
+     [tokens.AT(consumed='@', start=0, end=1),
+      tokens.TEXT(consumed='template', start=1, end=9),
+      tokens.OPEN_PAREN(consumed='(', start=9, end=10),
+      tokens.TEXT(consumed='testamajin', start=10, end=20),
+      tokens.COMMA(consumed=',', start=20, end=21),
+      tokens.TEXT(consumed=' argument1 ', start=21, end=32),
+      tokens.EQUALS(consumed='=', start=32, end=33),
+      tokens.TEXT(consumed=' 23', start=33, end=36),
+      tokens.CLOSE_PAREN(consumed=')', start=36, end=37),
+      tokens.NEW_LINE(consumed='\n', start=37, end=38)]),

     (os.path.join(examples, 'link.flux'),
-     [OPEN_LINK(consumed='[['),
-      TEXT(consumed='http://testamjest.com| testamajest'),
-      CLOSE_LINK(consumed=']]'), NEW_LINE(consumed='\n')]),
+     [tokens.OPEN_ILINK(consumed='[[', start=0, end=2),
+      tokens.TEXT(consumed='http://testamjest.com| testamajest',
+                  start=2, end=36),
+      tokens.CLOSE_ILINK(consumed=']]', start=36, end=38),
+      tokens.NEW_LINE(consumed='\n', start=38, end=39)]),

     (os.path.join(examples, 'nested function.flux'),
-     [OPEN_PAREN(consumed='('), AT(consumed='@'), TEXT(consumed='A'),
-      OPEN_PAREN(consumed='('), AT(consumed='@'), TEXT(consumed='B'),
-      OPEN_PAREN(consumed='('), CLOSE_PAREN(consumed=')'),
-      CLOSE_PAREN(consumed=')'), CLOSE_PAREN(consumed=')'),
-      NEW_LINE(consumed='\n')]))
+     [tokens.OPEN_PAREN(consumed='(', start=0, end=1),
+      tokens.AT(consumed='@', start=1, end=2),
+      tokens.TEXT(consumed='A', start=2, end=3),
+      tokens.OPEN_PAREN(consumed='(', start=3, end=4),
+      tokens.AT(consumed='@', start=4, end=5),
+      tokens.TEXT(consumed='B', start=5, end=6),
+      tokens.OPEN_PAREN(consumed='(', start=6, end=7),
+      tokens.CLOSE_PAREN(consumed=')', start=7, end=8),
+      tokens.CLOSE_PAREN(consumed=')', start=8, end=9),
+      tokens.CLOSE_PAREN(consumed=')', start=9, end=10),
+      tokens.NEW_LINE(consumed='\n', start=10, end=11)]))


+# Facilities for use when debugging errors in tests
+def compare_streams(actual, expected):
+    for act_token, exp_token in zip(actual, expected):
+        for attrib in ("consumed", "start", "end"):
+            if getattr(act_token, attrib) != getattr(exp_token, attrib):
+                print("Expected {}, found {} in {}.{}".format(
+                    getattr(exp_token, attrib),
+                    getattr(act_token, attrib),
+                    type(act_token),
+                    attrib))
+            else:
+                print("Good here!")


 @pytest.mark.parametrize(('char_stream', 'token_stream'), test_result_pairs)
 def test_Tokenize(char_stream, token_stream):
-    assert tokenize(char_stream) == token_stream
+    assert lexer.tokenize(char_stream) == token_stream


 class TestCharStream():
@@ -70,20 +114,3 @@ def pytest_funcarg__read(self, request):
         """Creates and returns a temp file for reading"""
         return request.cached_setup(self.setup_read, self.teardown_read,
                                     scope='class')
-
-
-class TestExtractOccurance():
-    def setup_lst(self):
-        return list("Give you the constitution again")
-
-    def pytest_funcarg__lst(self, request):
-        """Creates and returns a temp file for reading"""
-        return request.cached_setup(self.setup_lst, scope='function')
-
-    def test_extract_from_beginning(self, lst):
-        assert Lexer._extract_occurance(lst, 'Give you') == None
-        assert lst == list(' the constitution again')
-
-    def test_extract_from_middle(self, lst):
-        assert Lexer._extract_occurance(lst,
-                                        'constitution') == 'Give you the '
-        assert lst == list(" again")
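The compare_streams helper added above is for interactive debugging rather than assertions. A sketch of how a failing test_Tokenize case might be inspected, assuming lexer.tokenize accepts a file path as the parametrized tests imply, and that compare_streams has been imported or pasted into the session:

    from FTL import lexer, tokens

    # Expected stream for function.flux, taken from test_result_pairs.
    expected = [tokens.AT(consumed='@', start=0, end=1),
                tokens.TEXT(consumed='function', start=1, end=9),
                tokens.OPEN_PAREN(consumed='(', start=9, end=10),
                tokens.CLOSE_PAREN(consumed=')', start=10, end=11)]

    actual = lexer.tokenize('../examples/function.flux')
    compare_streams(actual, expected)  # prints a line per attribute checked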
17 changes: 16 additions & 1 deletion tokens.py

@@ -1,5 +1,14 @@
-class Token():
+"""Flux Tokens
+requirements: Python 3
+
+Tokens for use in the Flux lexer
+
+For more information, visit the project's wiki:
+http://flux.referata.com/
+"""
+
+
+class Token():
     def __init__(self, consumed, start, end):
         self.consumed = consumed
         self.start = start

@@ -13,6 +22,12 @@ def equals(cls, other):
     def name(cls):
         return cls.__name__

+    def __eq__(self, other):
+        for attrib in ("pattern", "consumed", "start", "end"):
+            if getattr(self, attrib) != getattr(other, attrib):
+                return False
+        return True
+

 class TEXT(Token):
     pattern = "."
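The new __eq__ is what lets test_Tokenize compare whole token streams: two tokens are equal exactly when their pattern, consumed text, and offsets all agree. A minimal sketch under that definition:

    from FTL import tokens

    a = tokens.TEXT(consumed='arg', start=16, end=19)
    b = tokens.TEXT(consumed='arg', start=16, end=19)
    c = tokens.TEXT(consumed='arg', start=17, end=20)

    assert a == b          # same pattern, consumed, start, and end
    assert not (a == c)    # same text, different offsets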
