Add convenience create raw lexer method
mdr committed Mar 20, 2011
1 parent e707435 commit 02ed3ed
Showing 1 changed file with 7 additions and 1 deletion.
@@ -49,6 +49,9 @@ object ScalaLexer {
     -1
   }
 
+  def createRawLexer(s: String, forgiveErrors: Boolean = false): ScalaLexer =
+    new ScalaLexer(new UnicodeEscapeReader(s, forgiveErrors), forgiveErrors)
+
   def tokeniseFull(file: File): (HiddenTokenInfo, List[Token]) = {
     val s = scala.io.Source.fromFile(file).mkString
     tokeniseFull(s)
@@ -71,7 +74,7 @@ object ScalaLexer {
   def tokenise(s: String): List[Token] = tokeniseFull(s)._2
 
   def rawTokenise(s: String, forgiveErrors: Boolean = false): List[Token] = {
-    val lexer = new ScalaLexer(new UnicodeEscapeReader(s, forgiveErrors), forgiveErrors)
+    val lexer = createRawLexer(s, forgiveErrors)
     var actualTokens: List[Token] = Nil
     var continue = true
     while (continue) {
@@ -83,6 +86,9 @@ object ScalaLexer {
     (actualTokens.tail).reverse
   }
 
+  /**
+   * For performance tests only
+   */
   def rawTokenise2(s: String): List[Token] = {
     val lexer = new WhitespaceAndCommentsGrouper(new ScalaLexer(new UnicodeEscapeReader(s)))
     var actualTokens: List[Token] = Nil
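For context, a minimal usage sketch of the new convenience method (not part of the commit). Only createRawLexer, rawTokenise, ScalaLexer and Token appear in the diff above; the import path scalariform.lexer and the example object are assumptions for illustration.

import scalariform.lexer.{ ScalaLexer, Token } // assumed package path

object CreateRawLexerExample extends App {
  val source = "val answer = 42 // trailing comment"

  // One call now builds a raw lexer; previously callers had to chain
  // new ScalaLexer(new UnicodeEscapeReader(source, forgiveErrors), forgiveErrors) by hand.
  val lexer: ScalaLexer = ScalaLexer.createRawLexer(source, forgiveErrors = true)

  // rawTokenise (shown in the diff) now builds the same lexer internally
  // and drives it to produce the full token list.
  val tokens: List[Token] = ScalaLexer.rawTokenise(source, forgiveErrors = true)
  tokens.foreach(println)
}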
