Add the support for token delegator.
The third parameter to the `tokenize()` function is a callback function
that is invoked every time the tokenizer encounters a new token. The
return value of this callback function will be appended to the list of
tokens to be returned by `tokenize()`.

An example that returns ['answer', '=', '42']:

  esprima.tokenize('answer = 42', { range: true }, function (token) {
    return token.value;
  });
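
Returning a different property gives a different projection. For
instance (a variation along the same lines, not part of the original
commit message), returning `token.type` instead yields ['Identifier',
'Punctuator', 'Numeric']:

  esprima.tokenize('answer = 42', { range: true }, function (token) {
    return token.type;
  });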

Fixes #1332
Closes gh-1342
ariya committed Oct 14, 2015
1 parent aa0066b commit 02a5ead
Showing 9 changed files with 85 additions and 41 deletions.
18 changes: 15 additions & 3 deletions esprima.js
@@ -1643,10 +1643,21 @@
                     flags: token.regex.flags
                 };
             }
-            extra.tokens.push(entry);
             if (extra.tokenValues) {
                 extra.tokenValues.push((entry.type === 'Punctuator' || entry.type === 'Keyword') ? entry.value : null);
             }
+            if (extra.tokenize) {
+                if (!extra.range) {
+                    delete entry.range;
+                }
+                if (!extra.loc) {
+                    delete entry.loc;
+                }
+                if (extra.delegate) {
+                    entry = extra.delegate(entry);
+                }
+            }
+            extra.tokens.push(entry);
         }
 
         return token;
@@ -5519,7 +5530,7 @@
         extra.tokens = tokens;
     }
 
-    function tokenize(code, options) {
+    function tokenize(code, options, delegate) {
         var toString,
             tokens;
 
@@ -5558,6 +5569,8 @@
         extra.tokens = [];
         extra.tokenValues = [];
         extra.tokenize = true;
+        extra.delegate = delegate;
+
         // The following two fields are necessary to compute the Regex tokens.
         extra.openParenToken = -1;
         extra.openCurlyToken = -1;
@@ -5594,7 +5607,6 @@
                 }
             }
 
-            filterTokenLocation();
             tokens = extra.tokens;
             if (typeof extra.comments !== 'undefined') {
                 tokens.comments = extra.comments;
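
The first hunk above shows the new behavior: in tokenize mode the
collected entry is stripped of its `range` and `loc` properties unless
those options were requested, and whatever the delegate returns
replaces the entry in the output list. A minimal sketch of that
behavior (illustrative, not part of the commit):

  // No `range` or `loc` requested, so each entry reaches the delegate
  // with only `type` and `value`; the delegate's return value replaces
  // the entry in the list that tokenize() returns.
  var result = esprima.tokenize('answer = 42', {}, function (token) {
    return token.type + ':' + token.value;
  });
  // result: ['Identifier:answer', 'Punctuator:=', 'Numeric:42']
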
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0000.tokens.json
@@ -38,10 +38,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
         "range": [
             9,
             13
@@ -55,6 +51,10 @@
                 "line": 1,
                 "column": 13
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     },
     {
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0001.tokens.json
@@ -92,10 +92,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
        "range": [
             13,
             17
@@ -109,6 +105,10 @@
                 "line": 1,
                 "column": 17
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     },
     {
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0002.tokens.json
@@ -74,10 +74,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
         "range": [
             13,
             17
@@ -91,6 +87,10 @@
                 "line": 1,
                 "column": 17
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     }
 ]
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0004.tokens.json
@@ -110,10 +110,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
         "range": [
             15,
             19
@@ -127,6 +123,10 @@
                 "line": 1,
                 "column": 19
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     }
 ]
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0008.tokens.json
@@ -128,10 +128,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
         "range": [
             16,
             20
@@ -145,6 +141,10 @@
                 "line": 1,
                 "column": 20
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     }
 ]
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0009.tokens.json
@@ -20,10 +20,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
         "range": [
             5,
             9
@@ -37,6 +33,10 @@
                 "line": 1,
                 "column": 9
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     }
 ]
10 changes: 5 additions & 5 deletions test/fixtures/tokenize/migrated_0010.tokens.json
@@ -2,10 +2,6 @@
     {
         "type": "RegularExpression",
         "value": "/42/",
-        "regex": {
-            "pattern": "42",
-            "flags": ""
-        },
         "range": [
             0,
             4
@@ -19,6 +15,10 @@
                 "line": 1,
                 "column": 4
             }
-        }
+        },
+        "regex": {
+            "pattern": "42",
+            "flags": ""
+        }
     }
 ]
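
In all seven fixture diffs above the token data is unchanged; only the
property order moves, with `regex` now serialized after `loc` instead
of before `range`.
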
38 changes: 35 additions & 3 deletions test/utils/evaluate-testcase.js
@@ -191,7 +191,7 @@
 
     function testTokenize(code, tokens) {
         'use strict';
-        var options, expected, actual, tree;
+        var options, expected, actual, list, entries, types;
 
         options = {
             comment: true,
@@ -203,14 +203,46 @@
         expected = JSON.stringify(tokens, null, 4);
 
         try {
-            tree = esprima.tokenize(code, options);
-            actual = JSON.stringify(tree, null, 4);
+            list = esprima.tokenize(code, options);
+            actual = JSON.stringify(list, null, 4);
         } catch (e) {
             throw new NotMatchingError(expected, e.toString());
         }
         if (expected !== actual) {
             throw new NotMatchingError(expected, actual);
         }
+
+        // Use the delegate to collect the token separately.
+        try {
+            entries = [];
+            esprima.tokenize(code, options, function (token) {
+                entries.push(token);
+                return token;
+            });
+            actual = JSON.stringify(entries, null, 4);
+        } catch (e) {
+            throw new NotMatchingError(expected, e.toString());
+        }
+        if (expected !== actual) {
+            throw new NotMatchingError(expected, actual);
+        }
+
+        // Use the delegate to filter the token type.
+        try {
+            entries = esprima.tokenize(code, options, function (token) {
+                return token.type;
+            });
+            actual = JSON.stringify(entries, null, 4);
+        } catch (e) {
+            throw new NotMatchingError(expected, e.toString());
+        }
+        types = tokens.map(function (t) {
+            return t.type;
+        });
+        expected = JSON.stringify(types, null, 4);
+        if (expected !== actual) {
+            throw new NotMatchingError(expected, actual);
+        }
     }
 
     function testModule(code, exception) {
