
Minor cleanup

Commenting, rename variables, add a spec
1 parent 9c5f59e · commit b149625c745fa109b026dd0c043352f472ea673e · Ryan Patterson committed Feb 24, 2012
Showing with 37 additions and 13 deletions.
  1. +25 −13 lib/jsgrep.js
  2. +12 −0 specs/jspatch.spec.js
lib/jsgrep.js
@@ -86,6 +86,10 @@ Matcher.identifierIsMetavar = function(identifier) {
* @param {Narcissus.parser.Node} target
*/
Matcher.getSourceForNode = function(target) {
+ if (target.removed) {
+ throw new JsgrepError('Tried to get source for a removed node')
+ .setSource(target.tokenizer.filename, target.lineno);
+ }
return target.tokenizer.source.substring(target.start, target.end + 1);
};
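
The slice above treats node end offsets as inclusive (hence end + 1); the _replaceWithString hunk further down adopts the same convention for its error message. A minimal, self-contained sketch of that convention, with illustrative offsets rather than real Narcissus output:

    var source = 'var foo = 1;';
    var start = 4, end = 6;                         // assumed inclusive offsets spanning `foo`
    console.log(source.substring(start, end + 1));  // -> "foo"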
@@ -114,13 +118,12 @@ Matcher.getPatternFromPatch = function(patch) {
/**
* Modify the AST of the node to be used with jsgrep.
- *
- * XXX(rpatterson): This is an ugly hack, necessary because if we are in a
- * property initializer, the identifier that we see isn't really an identifier,
- * it's a property name, so we need to label it as such.
*/
Matcher.mangleAST = function(ast) {
const tokens = Narcissus.definitions.tokenIds;
+ // XXX(rpatterson): This is an ugly hack, necessary because if we are in a
+ // property initializer, the identifier that we see isn't really an
+ // identifier, it's a property name, so we need to label it as such.
forEachNode(ast, function(node) {
if (node.type === tokens.PROPERTY_INIT) {
node.children[0].propertyName = true;
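
A hedged, self-contained stand-in for the labeling the relocated comment describes; the toy node shape below only mimics Narcissus and is not the real parser output:

    var PROPERTY_INIT = 'PROPERTY_INIT';
    function labelPropertyNames(node) {
      if (node.type === PROPERTY_INIT) {
        // In `({ key: value })`, children[0] is the property name `key`;
        // flag it so it is not treated like an ordinary identifier.
        node.children[0].propertyName = true;
      }
      (node.children || []).forEach(labelPropertyNames);
    }
    var ast = {
      type: PROPERTY_INIT,
      children: [{ type: 'IDENTIFIER', value: 'key' },
                 { type: 'IDENTIFIER', value: 'value' }]
    };
    labelPropertyNames(ast);
    console.log(ast.children[0].propertyName);  // -> true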
@@ -254,6 +257,7 @@ Matcher.prototype.getPatchedCode = function(patch, filename, line) {
tokenizerMatchAst(nLexer, this.boundVars[pLexer.token.value].ast);
} else if (t == tokens.ELLIPSIS) {
+ // Skip over all tokens in each expression matched by the ellipsis
var skipNodes = this.boundVars.ellipses[curEllip++];
_.each(skipNodes, function(n, i) {
nLexer.match(tokens.COMMA);
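
A hedged illustration of the skipping the new comment describes, assuming `...` is the surface syntax behind tokens.ELLIPSIS: if a pattern such as `f(..., c)` matched `f(a, b, c)`, the ellipsis is bound to the expressions `a` and `b`, and the patched-code pass has to consume each bound expression together with a separating comma. A toy stand-in:

    var boundToEllipsis = ['a', 'b'];   // stands in for skipNodes
    var consumed = [];
    boundToEllipsis.forEach(function(expr) {
      consumed.push(expr, ',');         // the expression's tokens plus a COMMA
    });
    console.log(consumed.join(' '));    // -> "a , b ,"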
@@ -351,17 +355,24 @@ Matcher.prototype.applyPatch = function(patch, filename, line) {
this._replaceWithString(this.ast, nodeSource);
};
-function tokenizerMatchAst(lexer, v) {
+/**
+ * Skip over all tokens in search that are in pattern's AST, aborting if there
+ * is any mismatch.
+ *
+ * @param search Tokenizer to skip tokens from.
+ * @param pattern Node containing the tokens to skip.
+ */
+function tokenizerMatchAst(search, pattern) {
const tokens = Narcissus.definitions.tokenIds;
- var vLexer = new Narcissus.lexer.Tokenizer(Matcher.getSourceForNode(v),
- v.tokenizer.filename, v.lineno);
+ var pLexer = new Narcissus.lexer.Tokenizer(Matcher.getSourceForNode(pattern),
+ pattern.tokenizer.filename, pattern.lineno);
var t, t2;
- while ((t = vLexer.get()) != tokens.END) {
- if ((t2 = lexer.get()) != t) {
+ while ((t = pLexer.get()) != tokens.END) {
+ if ((t2 = search.get()) != t) {
throw new JsgrepError('Mismatch while matching context ellipsis. ' +
'Expected ' + tokenString(t) + '; found ' + tokenString(t2))
- .setSource(lexer.filename, lexer.lineno);
+ .setSource(search.filename, search.lineno);
}
}
}
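
A hedged, self-contained stand-in for the lockstep comparison above: the search tokenizer has to reproduce the pattern node's tokens exactly, and any divergence aborts with a mismatch. Toy strings replace real Narcissus token ids here:

    function matchTokenStreams(search, pattern) {
      for (var i = 0; i < pattern.length; i++) {
        if (search[i] !== pattern[i]) {
          throw new Error('Mismatch while matching context ellipsis. ' +
                          'Expected ' + pattern[i] + '; found ' + search[i]);
        }
      }
    }
    matchTokenStreams(['call', '(', 'args', ')'],
                      ['call', '(', 'args', ')']);    // matches, no error
    // matchTokenStreams(['foo', '('], ['call', '(']) would throw the mismatch.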
@@ -397,10 +408,11 @@ Matcher.prototype._replaceWithString = function(target, replacement) {
// Node encapsulates the replaced region
node.end += delta;
} else {
- throw new Error('Replaced `' + origSource.substring(start, end) +
+ throw new JsgrepError('Replaced `' + origSource.substring(start, end + 1) +
'` at (' + start + ', ' + end + ') but got confused on `' +
- origSource.substring(node.start, node.end) +
- '` at (' + node.start + ', ' + node.end + ')');
+ origSource.substring(node.start, node.end + 1) +
+ '` at (' + node.start + ', ' + node.end + ')')
+ .setSource(node.tokenizer.filename, node.lineno);
}
});
};
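
For context on the arithmetic the corrected error message reports (and on why both substrings now use end + 1): a minimal sketch of the replace-and-shift bookkeeping with illustrative numbers, not the real code path:

    var origSource = 'var a = b;';
    var start = 8, end = 8;                 // inclusive offsets of the `b` token
    var replacement = 'c + d';
    var delta = replacement.length - (end - start + 1);
    var patched = origSource.substring(0, start) + replacement +
                  origSource.substring(end + 1);
    console.log(patched);                   // -> "var a = c + d;"
    console.log(delta);                     // -> 4; nodes spanning the region grow node.end by delta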
specs/jspatch.spec.js
@@ -17,6 +17,7 @@ describe('Matcher.getPatchedCode', function() {
"+2"
].join('\n'),
result: '2'
+
}, {
name: 'two replacements',
source: '1, 1',
@@ -25,13 +26,15 @@ describe('Matcher.getPatchedCode', function() {
"+2"
].join('\n'),
result: '2, 2'
+
}, {
name: 'remove var',
source: 'var a = b;',
patch: [
"-var a = b;"
].join('\n'),
result: ''
+
}, {
name: 'match token type',
source: '({ "key": value })',
@@ -40,6 +43,15 @@ describe('Matcher.getPatchedCode', function() {
"+newKey"
].join('\n'),
result: '({ "key": value })'
+
+ }, {
+ name: 'token offsets',
+ source: 'call(args)',
+ patch: [
+ '-call(A)'
+ ].join('\n'),
+ result: ''
+
}
];
