Commit

add lexer tests
Josep M. Bach committed Sep 11, 2011
1 parent add31a8 commit f4f7a3c
Showing 6 changed files with 154 additions and 1 deletion.
7 changes: 7 additions & 0 deletions Rakefile
@@ -19,3 +19,10 @@ task :regenerate do
     puts "Or just type `bundle install`."
   end
 end
+
+require 'rake/testtask'
+Rake::TestTask.new do |t|
+  t.libs << "test"
+  t.test_files = FileList['test/**/*_test.rb']
+  t.verbose = true
+end
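
With this task in place, the whole suite runs with `rake test`: Rake::TestTask registers a task named `test` by default, puts the `test` directory on the load path via `t.libs`, and picks up every file matching `test/**/*_test.rb`.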
2 changes: 1 addition & 1 deletion bin/noscript
@@ -26,7 +26,7 @@ if args.any?{|a| a =~ '--tokens'}
   puts "-----TOKENS-----"
   lexer = Noscript::Parser.new
   lexer.load_file filename
-  while token = rex.next_token
+  while token = lexer.next_token
     p token
   end
 end
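
This one-line fix replaces a stale `rex` variable with the `lexer` local defined two lines earlier, so the token-dumping loop behind `--tokens` actually runs instead of raising a NameError.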
2 changes: 2 additions & 0 deletions noscript.gemspec
@@ -16,6 +16,8 @@ Gem::Specification.new do |s|

   s.add_development_dependency 'rexical'
   s.add_development_dependency 'racc'
+  s.add_development_dependency 'minitest'
+  s.add_development_dependency 'purdytest'

   s.files = `git ls-files`.split("\n")
   s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
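
Both new dependencies serve the test suite: minitest is the framework required in test/test_helper.rb below, and purdytest is a small companion gem that colorizes minitest's pass/fail output.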
59 changes: 59 additions & 0 deletions test/lexer/integration_test.rb
@@ -0,0 +1,59 @@
+require 'test_helper'
+
+class LexerIntegrationTest < MiniTest::Unit::TestCase
+
+  def test_assignment
+    tokenizes "foo = 'bar'", [
+      [:IDENTIFIER, 'foo'],
+      [:ASSIGN, '='],
+      [:STRING, 'bar'],
+    ]
+  end
+
+  def test_arithmetic_assignment
+    tokenizes "foo = (3 + 4) * 2", [
+      [:IDENTIFIER, 'foo'],
+      [:ASSIGN, '='],
+      [:LPAREN, '('],
+      [:DIGIT, 3],
+      ['+', '+'],
+      [:DIGIT, 4],
+      [:RPAREN, ')'],
+      ['*', '*'],
+      [:DIGIT, 2],
+    ]
+  end
+
+  def test_def_single_line
+    tokenizes "def foo(bar, baz); 'lorem'; end", [
+      [:DEF, 'def'],
+      [:IDENTIFIER, 'foo'],
+      [:LPAREN, '('],
+      [:IDENTIFIER, 'bar'],
+      [:COMMA, ','],
+      [:IDENTIFIER, 'baz'],
+      [:RPAREN, ')'],
+      [:SEMICOLON, ';'],
+      [:STRING, "lorem"],
+      [:SEMICOLON, ';'],
+      [:END, "end"],
+    ]
+  end
+
+  def test_def_multiline
+    tokenizes "def foo(bar, baz)\n 'lorem'\n end", [
+      [:DEF, 'def'],
+      [:IDENTIFIER, 'foo'],
+      [:LPAREN, '('],
+      [:IDENTIFIER, 'bar'],
+      [:COMMA, ','],
+      [:IDENTIFIER, 'baz'],
+      [:RPAREN, ')'],
+      [:NEWLINE, "\n "],
+      [:STRING, "lorem"],
+      [:NEWLINE, "\n "],
+      [:END, "end"],
+    ]
+  end
+
+end
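
Note that in test_def_multiline the NEWLINE tokens carry their trailing indentation ("\n ") as the token value rather than a bare "\n"; lexer_test.rb below pins down the same behavior in isolation.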
66 changes: 66 additions & 0 deletions test/lexer/lexer_test.rb
@@ -0,0 +1,66 @@
+require 'test_helper'
+
+class LexerTest < MiniTest::Unit::TestCase
+
+  def test_newline
+    tokenizes "\n", [[:NEWLINE, "\n"]]
+    tokenizes "\n ", [[:NEWLINE, "\n "]]
+    tokenizes "\n\n ", [[:NEWLINE, "\n\n "]]
+    tokenizes "\n\n ", [[:NEWLINE, "\n\n "]]
+  end
+
+  def test_ignores_whitespace
+    tokenizes " ", []
+    tokenizes ", ", [[:COMMA, ',']]
+  end
+
+  def test_digit
+    tokenizes "3", [[:DIGIT, 3]]
+    tokenizes "3234", [[:DIGIT, 3234]]
+  end
+
+  def test_assign
+    tokenizes "=", [[:ASSIGN, '=']]
+  end
+
+  def test_comma
+    tokenizes ",", [[:COMMA, ',']]
+  end
+
+  def test_semicolon
+    tokenizes ";", [[:SEMICOLON, ';']]
+  end
+
+  def test_string
+    tokenizes "'foo'", [[:STRING, 'foo']]
+  end
+
+  def test_def
+    tokenizes "def", [[:DEF, 'def']]
+  end
+
+  def test_lparen
+    tokenizes "(", [[:LPAREN, '(']]
+  end
+
+  def test_rparen
+    tokenizes ")", [[:RPAREN, ')']]
+  end
+
+  def test_end
+    tokenizes "end", [[:END, 'end']]
+  end
+
+  def test_identifier
+    tokenizes "hello", [[:IDENTIFIER, 'hello']]
+    tokenizes "hello_world", [[:IDENTIFIER, 'hello_world']]
+    tokenizes "_hey", [[:IDENTIFIER, '_hey']]
+  end
+
+  def test_everything_else
+    %w(+ - * / ^ &).each do |symbol|
+      tokenizes symbol, [[symbol, symbol]]
+    end
+  end
+
+end
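
For context, the scanner side of Noscript::Parser is generated by rexical (a dev dependency in the gemspec above). The grammar below is only a sketch of the kind of .rex rule set these token shapes imply; the rule patterns, their order, and the file layout are illustrative guesses, not the project's actual grammar:

    class Noscript::Parser

    rule
      \n[\n ]*       { [:NEWLINE, text] }
      \s
      \d+            { [:DIGIT, text.to_i] }
      def\b          { [:DEF, text] }
      end\b          { [:END, text] }
      '[^']*'        { [:STRING, text[1..-2]] }
      [a-zA-Z_]\w*   { [:IDENTIFIER, text] }
      =              { [:ASSIGN, text] }
      ,              { [:COMMA, text] }
      ;              { [:SEMICOLON, text] }
      \(             { [:LPAREN, text] }
      \)             { [:RPAREN, text] }
      .              { [text, text] }
    end

A rule with no action (the bare \s line) tells rexical to discard the match, which is how test_ignores_whitespace would pass; the final catch-all rule returns the matched character as both token type and value, matching test_everything_else. Keyword rules precede the identifier rule so that "def" and "end" never lex as identifiers.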
19 changes: 19 additions & 0 deletions test/test_helper.rb
@@ -0,0 +1,19 @@
+gem 'minitest'
+require 'minitest/unit'
+require 'minitest/autorun'
+require 'purdytest'
+
+require 'noscript'
+
+class MiniTest::Unit::TestCase
+  def tokenizes(input, expected)
+    lexer = Noscript::Parser.new
+    lexer.scan_setup(input)
+    tokens = []
+    while token = lexer.next_token
+      tokens << token
+    end
+
+    assert_equal expected, tokens
+  end
+end
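
The tokenizes helper exercises the scanner interface that rexical generates: scan_setup primes the lexer with an input string, and each next_token call returns one [type, value] pair until it returns nil at end of input. A hypothetical console session, with token shapes taken from the tests above:

    lexer = Noscript::Parser.new
    lexer.scan_setup("foo = 3")
    lexer.next_token # => [:IDENTIFIER, 'foo']
    lexer.next_token # => [:ASSIGN, '=']
    lexer.next_token # => [:DIGIT, 3]
    lexer.next_token # => nil, end of input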
