Skip to content

Commit

Permalink
Simplify token test assertion helpers.
Browse files Browse the repository at this point in the history
  • Loading branch information
plioi committed Apr 1, 2012
1 parent 0b0b1cd commit 7a4e47f
Show file tree
Hide file tree
Showing 12 changed files with 62 additions and 64 deletions.
8 changes: 8 additions & 0 deletions src/Parsley.Test/CharLexer.cs
@@ -0,0 +1,8 @@
namespace Parsley
{
    /// <summary>
    /// Test-support lexer that tokenizes input one character at a time:
    /// it configures the base <see cref="Lexer"/> with a single "Character"
    /// token kind whose pattern is <c>.</c> (presumably a regex, matching any
    /// single character — consistent with the other <c>Pattern</c> usages in
    /// this test suite such as <c>@"[0-9]"</c> and <c>@"[a-zA-Z]"</c>).
    /// Replaces the deleted CharTokenStream helper; tests now call
    /// <c>new CharLexer().Tokenize(new Text(...))</c> instead.
    /// </summary>
public class CharLexer : Lexer
{
        /// <summary>
        /// Registers the single catch-all "Character" pattern with the base lexer.
        /// </summary>
public CharLexer()
: base(new Pattern("Character", @".")) { }
}
}
8 changes: 0 additions & 8 deletions src/Parsley.Test/CharTokenStream.cs

This file was deleted.

2 changes: 1 addition & 1 deletion src/Parsley.Test/GrammarRuleTests.cs
Expand Up @@ -8,7 +8,7 @@ public class GrammarRuleTests : Grammar
[Fact] [Fact]
public void CanDefineMutuallyRecursiveRules() public void CanDefineMutuallyRecursiveRules()
{ {
var tokens = new CharTokenStream("(A)"); var tokens = new CharLexer().Tokenize(new Text("(A)"));
var expression = new GrammarRule<string>(); var expression = new GrammarRule<string>();
var alpha = new GrammarRule<string>(); var alpha = new GrammarRule<string>();
var parenthesizedExpresion = new GrammarRule<string>(); var parenthesizedExpresion = new GrammarRule<string>();
Expand Down
27 changes: 14 additions & 13 deletions src/Parsley.Test/GrammarTests.cs
@@ -1,24 +1,25 @@
using System; using System;
using System.Collections.Generic;
using Should; using Should;
using Xunit; using Xunit;


namespace Parsley namespace Parsley
{ {
public class GrammarTests : Grammar public class GrammarTests : Grammar
{ {
private static TokenStream Tokenize(string source) private static IEnumerable<Token> Tokenize(string source)
{ {
return new SampleTokenStream(source); return new SampleLexer().Tokenize(new Text(source));
} }


private class SampleTokenStream : TokenStream private class SampleLexer : Lexer
{ {
public static readonly TokenKind Digit = new Pattern("Digit", @"[0-9]"); public static readonly TokenKind Digit = new Pattern("Digit", @"[0-9]");
public static readonly TokenKind Letter = new Pattern("Letter", @"[a-zA-Z]"); public static readonly TokenKind Letter = new Pattern("Letter", @"[a-zA-Z]");
public static readonly TokenKind Symbol = new Pattern("Symbol", @"."); public static readonly TokenKind Symbol = new Pattern("Symbol", @".");


public SampleTokenStream(string source) public SampleLexer()
: base(new Lexer(Digit, Letter, Symbol).Tokenize(new Text(source))) { } : base(Digit, Letter, Symbol) { }
} }


private readonly Parser<Token> A, B, AB, COMMA; private readonly Parser<Token> A, B, AB, COMMA;
Expand Down Expand Up @@ -51,11 +52,11 @@ public void CanDetectTheEndOfInputWithoutAdvancing()
[Fact] [Fact]
public void CanDemandThatAGivenKindOfTokenAppearsNext() public void CanDemandThatAGivenKindOfTokenAppearsNext()
{ {
Token(SampleTokenStream.Letter).Parses(Tokenize("A")).IntoToken("A"); Token(SampleLexer.Letter).Parses(Tokenize("A")).IntoToken("A");
Token(SampleTokenStream.Letter).FailsToParse(Tokenize("0")).LeavingUnparsedTokens("0").WithMessage("(1, 1): Letter expected"); Token(SampleLexer.Letter).FailsToParse(Tokenize("0")).LeavingUnparsedTokens("0").WithMessage("(1, 1): Letter expected");


Token(SampleTokenStream.Digit).FailsToParse(Tokenize("A")).LeavingUnparsedTokens("A").WithMessage("(1, 1): Digit expected"); Token(SampleLexer.Digit).FailsToParse(Tokenize("A")).LeavingUnparsedTokens("A").WithMessage("(1, 1): Digit expected");
Token(SampleTokenStream.Digit).Parses(Tokenize("0")).IntoToken("0"); Token(SampleLexer.Digit).Parses(Tokenize("0")).IntoToken("0");
} }


[Fact] [Fact]
Expand All @@ -77,7 +78,7 @@ public void ApplyingARuleZeroOrMoreTimes()
parser.FailsToParse(Tokenize("ABABA!")).LeavingUnparsedTokens("!").WithMessage("(1, 6): B expected"); parser.FailsToParse(Tokenize("ABABA!")).LeavingUnparsedTokens("!").WithMessage("(1, 6): B expected");


Parser<Token> succeedWithoutConsuming = new LambdaParser<Token>(tokens => new Parsed<Token>(null, tokens)); Parser<Token> succeedWithoutConsuming = new LambdaParser<Token>(tokens => new Parsed<Token>(null, tokens));
Action infiniteLoop = () => ZeroOrMore(succeedWithoutConsuming).Parse(Tokenize("")); Action infiniteLoop = () => ZeroOrMore(succeedWithoutConsuming).Parse(new TokenStream(Tokenize("")));
infiniteLoop.ShouldThrow<Exception>("Parser encountered a potential infinite loop."); infiniteLoop.ShouldThrow<Exception>("Parser encountered a potential infinite loop.");
} }


Expand All @@ -92,7 +93,7 @@ public void ApplyingARuleOneOrMoreTimes()
parser.FailsToParse(Tokenize("ABABA!")).LeavingUnparsedTokens("!").WithMessage("(1, 6): B expected"); parser.FailsToParse(Tokenize("ABABA!")).LeavingUnparsedTokens("!").WithMessage("(1, 6): B expected");


Parser<Token> succeedWithoutConsuming = new LambdaParser<Token>(tokens => new Parsed<Token>(null, tokens)); Parser<Token> succeedWithoutConsuming = new LambdaParser<Token>(tokens => new Parsed<Token>(null, tokens));
Action infiniteLoop = () => OneOrMore(succeedWithoutConsuming).Parse(Tokenize("")); Action infiniteLoop = () => OneOrMore(succeedWithoutConsuming).Parse(new TokenStream(Tokenize("")));
infiniteLoop.ShouldThrow<Exception>("Parser encountered a potential infinite loop."); infiniteLoop.ShouldThrow<Exception>("Parser encountered a potential infinite loop.");
} }


Expand Down Expand Up @@ -183,9 +184,9 @@ public void ImprovingDefaultMessagesWithAKnownExpectation()


public class AlternationTests : Grammar public class AlternationTests : Grammar
{ {
private static TokenStream Tokenize(string source) private static IEnumerable<Token> Tokenize(string source)
{ {
return new CharTokenStream(source); return new CharLexer().Tokenize(new Text(source));
} }


private readonly Parser<Token> A, B, C; private readonly Parser<Token> A, B, C;
Expand Down
4 changes: 2 additions & 2 deletions src/Parsley.Test/IntegrationTests/Json/JsonGrammarTests.cs
Expand Up @@ -6,9 +6,9 @@ namespace Parsley.IntegrationTests.Json
{ {
public class JsonGrammarTests : JsonGrammar public class JsonGrammarTests : JsonGrammar
{ {
private static TokenStream Tokenize(string input) private static IEnumerable<Token> Tokenize(string input)
{ {
return new TokenStream(new JsonLexer().Tokenize(new Text(input))); return new JsonLexer().Tokenize(new Text(input));
} }


[Fact] [Fact]
Expand Down
4 changes: 2 additions & 2 deletions src/Parsley.Test/LambdaParserTests.cs
Expand Up @@ -8,10 +8,10 @@ public class LambdaParserTests
public void CreatesParsersFromLambdas() public void CreatesParsersFromLambdas()
{ {
var succeeds = new LambdaParser<string>(tokens => new Parsed<string>("AA", tokens.Advance().Advance())); var succeeds = new LambdaParser<string>(tokens => new Parsed<string>("AA", tokens.Advance().Advance()));
succeeds.PartiallyParses(new CharTokenStream("AABB")).LeavingUnparsedTokens("B", "B").IntoValue("AA"); succeeds.PartiallyParses(new CharLexer().Tokenize(new Text("AABB"))).LeavingUnparsedTokens("B", "B").IntoValue("AA");


var fails = new LambdaParser<string>(tokens => new Error<string>(tokens, ErrorMessage.Unknown())); var fails = new LambdaParser<string>(tokens => new Error<string>(tokens, ErrorMessage.Unknown()));
fails.FailsToParse(new CharTokenStream("AABB")).LeavingUnparsedTokens("A", "A", "B", "B").WithMessage("(1, 1): Parse error."); fails.FailsToParse(new CharLexer().Tokenize(new Text("AABB"))).LeavingUnparsedTokens("A", "A", "B", "B").WithMessage("(1, 1): Parse error.");
} }
} }
} }
38 changes: 19 additions & 19 deletions src/Parsley.Test/OperatorPrecedenceParserTests.cs
Expand Up @@ -12,21 +12,21 @@ public OperatorPrecedenceParserTests()
{ {
expression = new OperatorPrecedenceParser<Expression>(); expression = new OperatorPrecedenceParser<Expression>();


expression.Atom(SampleTokenStream.Digit, token => new Constant(int.Parse(token.Literal))); expression.Atom(SampleLexer.Digit, token => new Constant(int.Parse(token.Literal)));
expression.Atom(SampleTokenStream.Name, token => new Identifier(token.Literal)); expression.Atom(SampleLexer.Name, token => new Identifier(token.Literal));


expression.Unit(SampleTokenStream.LeftParen, Between(Token("("), expression, Token(")"))); expression.Unit(SampleLexer.LeftParen, Between(Token("("), expression, Token(")")));


expression.Binary(SampleTokenStream.Add, 3, (left, symbol, right) => new Form(symbol, left, right)); expression.Binary(SampleLexer.Add, 3, (left, symbol, right) => new Form(symbol, left, right));
expression.Binary(SampleTokenStream.Subtract, 3, (left, symbol, right) => new Form(symbol, left, right)); expression.Binary(SampleLexer.Subtract, 3, (left, symbol, right) => new Form(symbol, left, right));
expression.Binary(SampleTokenStream.Multiply, 4, (left, symbol, right) => new Form(symbol, left, right)); expression.Binary(SampleLexer.Multiply, 4, (left, symbol, right) => new Form(symbol, left, right));
expression.Binary(SampleTokenStream.Divide, 4, (left, symbol, right) => new Form(symbol, left, right)); expression.Binary(SampleLexer.Divide, 4, (left, symbol, right) => new Form(symbol, left, right));
expression.Binary(SampleTokenStream.Exponent, 5, (left, symbol, right) => new Form(symbol, left, right), Associativity.Right); expression.Binary(SampleLexer.Exponent, 5, (left, symbol, right) => new Form(symbol, left, right), Associativity.Right);
expression.Prefix(SampleTokenStream.Subtract, 6, (symbol, operand) => new Form(new Identifier(symbol.Literal), operand)); expression.Prefix(SampleLexer.Subtract, 6, (symbol, operand) => new Form(new Identifier(symbol.Literal), operand));
expression.Postfix(SampleTokenStream.Increment, 7, (symbol, operand) => new Form(new Identifier(symbol.Literal), operand)); expression.Postfix(SampleLexer.Increment, 7, (symbol, operand) => new Form(new Identifier(symbol.Literal), operand));
expression.Postfix(SampleTokenStream.Decrement, 7, (symbol, operand) => new Form(new Identifier(symbol.Literal), operand)); expression.Postfix(SampleLexer.Decrement, 7, (symbol, operand) => new Form(new Identifier(symbol.Literal), operand));


expression.Extend(SampleTokenStream.LeftParen, 8, callable => expression.Extend(SampleLexer.LeftParen, 8, callable =>
from arguments in Between(Token("("), ZeroOrMore(expression, Token(",")), Token(")")) from arguments in Between(Token("("), ZeroOrMore(expression, Token(",")), Token(")"))
select new Form(callable, arguments)); select new Form(callable, arguments));
} }
Expand Down Expand Up @@ -121,12 +121,12 @@ private void Parses(string input, string expectedTree)
expression.Parses(Tokenize(input)).IntoValue(e => e.ToString().ShouldEqual(expectedTree)); expression.Parses(Tokenize(input)).IntoValue(e => e.ToString().ShouldEqual(expectedTree));
} }


private static TokenStream Tokenize(string source) private static IEnumerable<Token> Tokenize(string source)
{ {
return new SampleTokenStream(source); return new SampleLexer().Tokenize(new Text(source));
} }


private class SampleTokenStream : TokenStream private class SampleLexer : Lexer
{ {
public static readonly TokenKind Digit = new Pattern("Digit", @"[0-9]"); public static readonly TokenKind Digit = new Pattern("Digit", @"[0-9]");
public static readonly TokenKind Name = new Pattern("Name", @"[a-z]+"); public static readonly TokenKind Name = new Pattern("Name", @"[a-z]+");
Expand All @@ -141,10 +141,10 @@ private class SampleTokenStream : TokenStream
public static readonly TokenKind RightParen = new Operator(")"); public static readonly TokenKind RightParen = new Operator(")");
public static readonly TokenKind Comma = new Operator(","); public static readonly TokenKind Comma = new Operator(",");


public SampleTokenStream(string source) public SampleLexer()
: base(new Lexer(Digit, Name, Increment, Decrement, Add, : base(Digit, Name, Increment, Decrement, Add,
Subtract, Multiply, Divide, Exponent, Subtract, Multiply, Divide, Exponent,
LeftParen, RightParen, Comma).Tokenize(new Text(source))) { } LeftParen, RightParen, Comma) { }
} }


private interface Expression private interface Expression
Expand Down
2 changes: 1 addition & 1 deletion src/Parsley.Test/ParsedTests.cs
Expand Up @@ -9,7 +9,7 @@ public class ParsedTests


public ParsedTests() public ParsedTests()
{ {
unparsed = new CharTokenStream("0"); unparsed = new TokenStream(new CharLexer().Tokenize(new Text("0")));
} }


[Fact] [Fact]
Expand Down
14 changes: 8 additions & 6 deletions src/Parsley.Test/ParserQueryTests.cs
@@ -1,4 +1,6 @@
using System; using System;
using System.Collections.Generic;
using System.Linq;
using Xunit; using Xunit;


namespace Parsley namespace Parsley
Expand All @@ -7,9 +9,9 @@ public class ParserQueryTests
{ {
private static readonly Parser<string> Next = new LambdaParser<string>(tokens => new Parsed<string>(tokens.Current.Literal, tokens.Advance())); private static readonly Parser<string> Next = new LambdaParser<string>(tokens => new Parsed<string>(tokens.Current.Literal, tokens.Advance()));


private static TokenStream Tokenize(string source) private static IEnumerable<Token> Tokenize(string source)
{ {
return new CharTokenStream(source); return new CharLexer().Tokenize(new Text(source));
} }


[Fact] [Fact]
Expand Down Expand Up @@ -45,22 +47,22 @@ public void PropogatesErrorsWithoutRunningRemainingParsers()
{ {
Parser<string> Fail = Grammar.Fail<string>(); Parser<string> Fail = Grammar.Fail<string>();


var source = Tokenize("xy"); var tokens = Tokenize("xy").ToArray();


(from _ in Fail (from _ in Fail
from x in Next from x in Next
from y in Next from y in Next
select Tuple.Create(x, y)).FailsToParse(source).LeavingUnparsedTokens("x", "y"); select Tuple.Create(x, y)).FailsToParse(tokens).LeavingUnparsedTokens("x", "y");


(from x in Next (from x in Next
from _ in Fail from _ in Fail
from y in Next from y in Next
select Tuple.Create(x, y)).FailsToParse(source).LeavingUnparsedTokens("y"); select Tuple.Create(x, y)).FailsToParse(tokens).LeavingUnparsedTokens("y");


(from x in Next (from x in Next
from y in Next from y in Next
from _ in Fail from _ in Fail
select Tuple.Create(x, y)).FailsToParse(source).AtEndOfInput(); select Tuple.Create(x, y)).FailsToParse(tokens).AtEndOfInput();
} }
} }
} }
2 changes: 1 addition & 1 deletion src/Parsley.Test/Parsley.Test.csproj
Expand Up @@ -47,7 +47,7 @@
<Link>Properties\CommonAssemblyInfo.cs</Link> <Link>Properties\CommonAssemblyInfo.cs</Link>
</Compile> </Compile>
<Compile Include="AssertionExtensions.cs" /> <Compile Include="AssertionExtensions.cs" />
<Compile Include="CharTokenStream.cs" /> <Compile Include="CharLexer.cs" />
<Compile Include="ErrorMessageListTests.cs" /> <Compile Include="ErrorMessageListTests.cs" />
<Compile Include="ErrorMessageTests.cs" /> <Compile Include="ErrorMessageTests.cs" />
<Compile Include="ErrorTests.cs" /> <Compile Include="ErrorTests.cs" />
Expand Down
5 changes: 0 additions & 5 deletions src/Parsley/Lexer.cs
Expand Up @@ -13,11 +13,6 @@ public Lexer(params TokenKind[] kinds)
this.kinds.Add(TokenKind.Unknown); this.kinds.Add(TokenKind.Unknown);
} }


public IEnumerable<Token> Tokenize(string text)
{
return Tokenize(new Text(text));//TODO: Should we phase out the Text overload so consumer doesn't have to care about Text ever?
}

public IEnumerable<Token> Tokenize(Text text) public IEnumerable<Token> Tokenize(Text text)
{ {
while (!text.EndOfInput) while (!text.EndOfInput)
Expand Down
12 changes: 6 additions & 6 deletions src/Parsley/ParsingAssertions.cs
Expand Up @@ -22,9 +22,9 @@ public static void ShouldEqual(this Token actual, TokenKind expectedKind, string
AssertTokenLiteralsEqual(expectedLiteral, actual.Literal); AssertTokenLiteralsEqual(expectedLiteral, actual.Literal);
} }


public static Reply<T> FailsToParse<T>(this Parser<T> parser, TokenStream tokens) public static Reply<T> FailsToParse<T>(this Parser<T> parser, IEnumerable<Token> tokens)
{ {
var reply = parser.Parse(tokens); var reply = parser.Parse(new TokenStream(tokens));


if (reply.Success) if (reply.Success)
throw new AssertionException("parser failure", "parser completed successfully"); throw new AssertionException("parser failure", "parser completed successfully");
Expand Down Expand Up @@ -52,14 +52,14 @@ public static Reply<T> WithNoMessage<T>(this Reply<T> reply)
return reply; return reply;
} }


public static Reply<T> PartiallyParses<T>(this Parser<T> parser, TokenStream tokens) public static Reply<T> PartiallyParses<T>(this Parser<T> parser, IEnumerable<Token> tokens)
{ {
return parser.Parse(tokens).Succeeds(); return parser.Parse(new TokenStream(tokens)).Succeeds();
} }


public static Reply<T> Parses<T>(this Parser<T> parser, TokenStream tokens) public static Reply<T> Parses<T>(this Parser<T> parser, IEnumerable<Token> tokens)
{ {
return parser.Parse(tokens).Succeeds().AtEndOfInput(); return parser.Parse(new TokenStream(tokens)).Succeeds().AtEndOfInput();
} }


private static Reply<T> Succeeds<T>(this Reply<T> reply) private static Reply<T> Succeeds<T>(this Reply<T> reply)
Expand Down

0 comments on commit 7a4e47f

Please sign in to comment.