Lexer.Tokenize(...) receives a string instead of a Text object.

commit 5bbd8f1fe84fdee1b63ea869ad01af41a0bf6ec6 (1 parent: 7a4e47f)
@plioi authored
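
For call sites, the net effect is that a raw source string can be passed straight to Tokenize; the lexer now wraps it in a Text internally. A minimal sketch of a caller against the new string-based signature, mirroring the ErrorTests setup below (the helper name here is illustrative, not part of the library):

using System.Collections.Generic;
using Parsley;

public static class TokenizeSketch
{
    // Hypothetical helper: tokenizes a raw string with the new
    // string-based Tokenize signature and wraps the result in a TokenStream.
    public static TokenStream TokenizeToStream(string input)
    {
        var lexer = new Lexer();
        IEnumerable<Token> tokens = lexer.Tokenize(input);
        return new TokenStream(tokens);
    }
}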
4 src/Parsley.Test/ErrorTests.cs
@@ -12,8 +12,8 @@ public class ErrorTests
public ErrorTests()
{
var lexer = new Lexer();
- x = new TokenStream(lexer.Tokenize(new Text("x")));
- endOfInput = new TokenStream(lexer.Tokenize(new Text("")));
+ x = new TokenStream(lexer.Tokenize("x"));
+ endOfInput = new TokenStream(lexer.Tokenize(""));
}
[Fact]
2  src/Parsley.Test/GrammarRuleTests.cs
@@ -8,7 +8,7 @@ public class GrammarRuleTests : Grammar
[Fact]
public void CanDefineMutuallyRecursiveRules()
{
- var tokens = new CharLexer().Tokenize(new Text("(A)"));
+ var tokens = new CharLexer().Tokenize("(A)");
var expression = new GrammarRule<string>();
var alpha = new GrammarRule<string>();
var parenthesizedExpresion = new GrammarRule<string>();
8 src/Parsley.Test/GrammarTests.cs
@@ -7,9 +7,9 @@ namespace Parsley
{
public class GrammarTests : Grammar
{
- private static IEnumerable<Token> Tokenize(string source)
+ private static IEnumerable<Token> Tokenize(string input)
{
- return new SampleLexer().Tokenize(new Text(source));
+ return new SampleLexer().Tokenize(input);
}
private class SampleLexer : Lexer
@@ -184,9 +184,9 @@ public void ImprovingDefaultMessagesWithAKnownExpectation()
public class AlternationTests : Grammar
{
- private static IEnumerable<Token> Tokenize(string source)
+ private static IEnumerable<Token> Tokenize(string input)
{
- return new CharLexer().Tokenize(new Text(source));
+ return new CharLexer().Tokenize(input);
}
private readonly Parser<Token> A, B, C;
2  src/Parsley.Test/IntegrationTests/Json/JsonGrammarTests.cs
@@ -8,7 +8,7 @@ public class JsonGrammarTests : JsonGrammar
{
private static IEnumerable<Token> Tokenize(string input)
{
- return new JsonLexer().Tokenize(new Text(input));
+ return new JsonLexer().Tokenize(input);
}
[Fact]
2  src/Parsley.Test/IntegrationTests/Json/JsonLexerTests.cs
@@ -9,7 +9,7 @@ public class JsonLexerTests
{
private static IEnumerable<Token> Tokenize(string input)
{
- return new JsonLexer().Tokenize(new Text(input));
+ return new JsonLexer().Tokenize(input);
}
[Fact]
4 src/Parsley.Test/LambdaParserTests.cs
@@ -8,10 +8,10 @@ public class LambdaParserTests
public void CreatesParsersFromLambdas()
{
var succeeds = new LambdaParser<string>(tokens => new Parsed<string>("AA", tokens.Advance().Advance()));
- succeeds.PartiallyParses(new CharLexer().Tokenize(new Text("AABB"))).LeavingUnparsedTokens("B", "B").IntoValue("AA");
+ succeeds.PartiallyParses(new CharLexer().Tokenize("AABB")).LeavingUnparsedTokens("B", "B").IntoValue("AA");
var fails = new LambdaParser<string>(tokens => new Error<string>(tokens, ErrorMessage.Unknown()));
- fails.FailsToParse(new CharLexer().Tokenize(new Text("AABB"))).LeavingUnparsedTokens("A", "A", "B", "B").WithMessage("(1, 1): Parse error.");
+ fails.FailsToParse(new CharLexer().Tokenize("AABB")).LeavingUnparsedTokens("A", "A", "B", "B").WithMessage("(1, 1): Parse error.");
}
}
}
2  src/Parsley.Test/LexerTests.cs
@@ -19,7 +19,7 @@ public LexerTests()
private IEnumerable<Token> Tokenize(string input)
{
- return new Lexer(lower, upper, space).Tokenize(new Text(input));
+ return new Lexer(lower, upper, space).Tokenize(input);
}
[Fact]
4 src/Parsley.Test/OperatorPrecedenceParserTests.cs
@@ -121,9 +121,9 @@ private void Parses(string input, string expectedTree)
expression.Parses(Tokenize(input)).IntoValue(e => e.ToString().ShouldEqual(expectedTree));
}
- private static IEnumerable<Token> Tokenize(string source)
+ private static IEnumerable<Token> Tokenize(string input)
{
- return new SampleLexer().Tokenize(new Text(source));
+ return new SampleLexer().Tokenize(input);
}
private class SampleLexer : Lexer
2  src/Parsley.Test/ParsedTests.cs
@@ -9,7 +9,7 @@ public class ParsedTests
public ParsedTests()
{
- unparsed = new TokenStream(new CharLexer().Tokenize(new Text("0")));
+ unparsed = new TokenStream(new CharLexer().Tokenize("0"));
}
[Fact]
4 src/Parsley.Test/ParserQueryTests.cs
@@ -9,9 +9,9 @@ public class ParserQueryTests
{
private static readonly Parser<string> Next = new LambdaParser<string>(tokens => new Parsed<string>(tokens.Current.Literal, tokens.Advance()));
- private static IEnumerable<Token> Tokenize(string source)
+ private static IEnumerable<Token> Tokenize(string input)
{
- return new CharLexer().Tokenize(new Text(source));
+ return new CharLexer().Tokenize(input);
}
[Fact]
3  src/Parsley/Lexer.cs
@@ -13,8 +13,9 @@ public Lexer(params TokenKind[] kinds)
this.kinds.Add(TokenKind.Unknown);
}
- public IEnumerable<Token> Tokenize(Text text)
+ public IEnumerable<Token> Tokenize(string input)
{
+ var text = new Text(input);
while (!text.EndOfInput)
{
var current = GetToken(text);
36 src/Parsley/Text.cs
@@ -6,28 +6,28 @@ namespace Parsley
public class Text
{
private readonly int index;
- private readonly string source;
+ private readonly string input;
private readonly int line;
- public Text(string source)
- : this(NormalizeLineEndings(source), 0, 1) { }
+ public Text(string input)
+ : this(NormalizeLineEndings(input), 0, 1) { }
- private Text(string source, int index, int line)
+ private Text(string input, int index, int line)
{
- this.source = source;
+ this.input = input;
this.index = index;
- if (index > source.Length)
- this.index = source.Length;
+ if (index > input.Length)
+ this.index = input.Length;
this.line = line;
}
public string Peek(int characters)
{
- return index + characters >= source.Length
- ? source.Substring(index)
- : source.Substring(index, characters);
+ return index + characters >= input.Length
+ ? input.Substring(index)
+ : input.Substring(index, characters);
}
public Text Advance(int characters)
@@ -38,24 +38,24 @@ public Text Advance(int characters)
int newIndex = index + characters;
int newLineNumber = line + Peek(characters).Count(ch => ch == '\n');
- return new Text(source, newIndex, newLineNumber);
+ return new Text(input, newIndex, newLineNumber);
}
public bool EndOfInput
{
- get { return index >= source.Length; }
+ get { return index >= input.Length; }
}
public MatchResult Match(TokenRegex regex)
{
- return regex.Match(source, index);
+ return regex.Match(input, index);
}
public MatchResult Match(Predicate<char> test)
{
int i = index;
- while (i < source.Length && test(source[i]))
+ while (i < input.Length && test(input[i]))
i++;
var value = Peek(i - index);
@@ -73,7 +73,7 @@ private int Column
if (index == 0)
return 1;
- int indexOfPreviousNewLine = source.LastIndexOf('\n', index - 1);
+ int indexOfPreviousNewLine = input.LastIndexOf('\n', index - 1);
return index - indexOfPreviousNewLine;
}
}
@@ -85,12 +85,12 @@ public Position Position
public override string ToString()
{
- return source.Substring(index);
+ return input.Substring(index);
}
- private static string NormalizeLineEndings(string source)
+ private static string NormalizeLineEndings(string input)
{
- return source.Replace("\r\n", "\n").Replace('\r', '\n');
+ return input.Replace("\r\n", "\n").Replace('\r', '\n');
}
}
}
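
Because Text is now constructed inside Tokenize rather than by callers, the behavior shown in this file is what every Tokenize call inherits: the constructor normalizes line endings, Peek clamps at the end of input, and Advance counts consumed newlines to track the line number. A small sketch exercising those paths directly (Text remains public, so direct construction is still possible; the names below are illustrative):

using Parsley;

public static class TextSketch
{
    public static void Demonstrate()
    {
        // NormalizeLineEndings turns "\r\n" and "\r" into "\n".
        var text = new Text("ab\r\ncd");

        // Peek clamps to the remaining input when asked for more
        // characters than are left.
        string firstTwo = text.Peek(2);     // "ab"
        string everything = text.Peek(100); // "ab\ncd"

        // Advance returns a new Text and increments the line number
        // for each '\n' consumed.
        Text rest = text.Advance(3);        // positioned at "cd", line 2
        bool done = rest.Advance(2).EndOfInput; // true
    }
}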