Permalink
Browse files

whitespace cleanup

  • Loading branch information...
1 parent fd4509a commit aa29764267c74c9d2e71bca6daa3e582bdd623d9 @smtlaissezfaire smtlaissezfaire committed Apr 4, 2010
Showing with 99 additions and 95 deletions.
  1. +3 −4 README
  2. +32 −27 jslex.js
  3. +64 −64 jslex_test.js
View
7 README
@@ -8,7 +8,6 @@ The TokenDef constructor takes two arguments: a regular expression and a functio
For example, this could be used to recognize a natural number:
- new TokenDef(/[0-9]+/, function (string) {
- return ["int", parseInt(string)];
- })
-
+new TokenDef(/[0-9]+/, function (string) {
+ return ["int", parseInt(string)];
+})
View
@@ -24,51 +24,56 @@
*/
/*
 * A token definition: pairs a regular expression with a callback that
 * converts the matched text into a token value.
 *
 * match(text) strips leading whitespace, then requires the pattern to
 * match at the very start of what remains.  On success it returns
 *   { success: true, token: <callback result>, text: <unconsumed rest> }
 * and on failure { success: false }.
 */
function TokenDef(pattern, callbackfn) {
  this.match = function match(text) {
    // Strip leading whitespace inline.  The original called the
    // non-standard String.prototype.trimBegin helper (defined outside
    // this hunk); an anchored \s+ replace is the portable equivalent.
    var trimmed = text.replace(/^\s+/, "");
    var found = trimmed.match(pattern);

    // The pattern must match at position 0 — a hit further along means
    // this definition does not apply to the current input.
    if (found && found.index === 0) {
      return {
        success: true,
        token: callbackfn(found[0]),
        text: trimmed.slice(found[0].length)
      };
    }
    return { success: false };
  };
}
/*
 * Callback for rules whose matches should be dropped from the token
 * stream (e.g. comments): always yields null, which the lexer skips.
 */
TokenDef.ignore = function (string) {
  return null;
};
/*
 * Builds a callback that disregards the matched text and always
 * produces the given fixed token (useful for keywords/punctuation).
 */
TokenDef.always = function (token) {
  var constant = function (string) {
    return token;
  };
  return constant;
};
/*
 * Callback that uses the raw matched text itself as the token.
 * NOTE: the original name is misspelled ("identitiy"); it is kept so
 * existing callers keep working, and a correctly spelled alias is
 * provided below for new code.
 */
TokenDef.identitiy = function (string) {
  return string;
};

// Correctly spelled alias; same function object.
TokenDef.identity = TokenDef.identitiy;
-
+
/*
 * Tokenizes `text` with an ordered list of TokenDef-like rules
 * (anything exposing match(text) -> { success, token, text }).
 *
 * At each position the rules are tried in order; the first match
 * consumes input.  A token of null/undefined (the TokenDef.ignore
 * convention) is omitted from the output.
 *
 * @param tokenDefs  array of token definitions, highest priority first
 * @param text       the source string to tokenize
 * @returns array of tokens
 * @throws Error when no rule matches the remaining input
 */
function jsLex(tokenDefs, text) {
  text = text.trim();
  var tokenStream = [];
  var tokenCount = tokenDefs.length;

  outer: while (text.length > 0) {
    for (var i = 0; i < tokenCount; i++) {
      var match = tokenDefs[i].match(text);
      if (match.success) {
        // Bug fix: the original used truthiness (`match.token && push`),
        // which silently dropped legitimate falsy tokens such as 0 or "".
        // Only null/undefined — the "ignore this match" convention — skip.
        if (match.token !== null && match.token !== undefined) {
          tokenStream.push(match.token);
        }
        text = match.text.trim();
        continue outer;
      }
    }
    throw new Error("Can't tokenize string " + text);
  }

  return tokenStream;
}
View
@@ -26,95 +26,95 @@
load("jslex.js");
/*
 * Exercises TokenDef.match directly: a hit at the start, a miss, and a
 * partial consume.  Returns true when every check passes.
 */
function testTokenMatch() {
  var checks = [];

  var first = new TokenDef(/[0-9]/, parseInt).match("123");
  checks.push(first.success);
  checks.push(first.token == 1);
  checks.push(first.text == "23");

  var nonMatch = new TokenDef(/[0-9]/, parseInt).match("a123");
  checks.push(!nonMatch.success);

  var partial = new TokenDef(/[0-9]+/, parseInt).match("12,");
  checks.push(partial.success);
  checks.push(partial.token == 12);
  checks.push(partial.text == ",");

  for (var i = 0; i < checks.length; i++) {
    if (!checks[i]) {
      return false;
    }
  }
  return true;
}
/*
 * Runs the TokenDef test group, reports the outcome, and returns it.
 */
function testTokenDef() {
  var passed = testTokenMatch();

  print(passed ? "Token definition tests succeeded."
               : "Token definition tests failed.");
  return passed;
}
/*
 * Lexes a string of integers with a single rule and compares the token
 * stream against the expected values.  Returns true on success.
 */
function testIntLexer() {
  var tokens = jsLex([new TokenDef(/[0-9]+/, parseInt)], "123 456 789");
  var expected = [123, 456, 789];

  for (var i = 0; i < expected.length; i++) {
    if (tokens[i] != expected[i]) {
      return false;
    }
  }
  return true;
}
/*
 * Exercises a two-rule lexer (integers and identifiers) and checks the
 * tagged [type, value] tokens it produces.  Returns true on success.
 */
function testIntOrIdentifierLexer() {
  var result = true;
  var tokenRules = [
    new TokenDef(/[0-9]+/, function (string) {
      // Bug fix: the radix was passed as the string "10"; parseInt
      // expects a number (the string only worked through coercion).
      return ["int", parseInt(string, 10)];
    }),
    new TokenDef(/[a-zA-Z][a-zA-Z0-9_$]*/, function (string) {
      return ["ident", string];
    })
  ];
  var tokens = jsLex(tokenRules, "123 plus P4");

  result = tokens[0][0] == "int" && tokens[0][1] == 123 && result;
  result = tokens[1][0] == "ident" && tokens[1][1] == "plus" && result;
  result = tokens[2][0] == "ident" && tokens[2][1] == "P4" && result;

  return result;
}
/*
 * Runs the lexer test group, reports the outcome, and returns it.
 * Both sub-tests always run, even if the first one fails.
 */
function testLexer() {
  var passed = testIntLexer();
  passed = testIntOrIdentifierLexer() && passed;

  print(passed ? "Lexer tests succeeded." : "Lexer tests failed.");
  return passed;
}
/*
 * Entry point: runs every test group (all groups run even if an
 * earlier one fails), prints the overall outcome, and returns it.
 */
function testAll() {
  var passed = testTokenDef();
  passed = testLexer() && passed;

  print(passed ? "All tests succeeded." : "Some tests failed.");
  return passed;
}

testAll();

0 comments on commit aa29764

Please sign in to comment.