diff --git a/lib/kyuri/core.js b/lib/kyuri/core.js
index 2faba0f..6c9f547 100644
--- a/lib/kyuri/core.js
+++ b/lib/kyuri/core.js
@@ -9,11 +9,12 @@
  *
  */
 
-var parser = require('./parser').parser,
-    i18n = require('./i18n').i18n,
-    Lexer = require('./lexer').Lexer;
+var Parser = require('./parser').Parser,
+    Lexer = require('./lexer').Lexer,
+    i18n = require('./i18n').i18n;
 
 var core = exports,
+    parser = new Parser(),
     lexer = new Lexer('en', i18n);
 
 exports.compile = function (code, options) {
@@ -40,24 +41,3 @@ exports.nodes = function (code) {
 
 exports.i18n = i18n;
 exports.lexer = lexer;
-
-//
-// Monkey slap the Jison parser since we're providing our own lexer.
-//
-parser.lexer = {
-  lex: function() {
-    var token;
-    token = this.tokens[this.pos] || [""];
-    this.pos += 1;
-    this.yylineno = token[2];
-    this.yytext = token[1];
-    return token[0];
-  },
-  setInput: function(tokens) {
-    this.tokens = tokens;
-    return (this.pos = 0);
-  },
-  upcomingInput: function() {
-    return "";
-  }
-};
diff --git a/lib/kyuri/lexer.js b/lib/kyuri/lexer.js
index de11042..4f08975 100644
--- a/lib/kyuri/lexer.js
+++ b/lib/kyuri/lexer.js
@@ -10,8 +10,10 @@ var helpers = require('./helpers'),
     sys = require('sys'),
     eyes = require('eyes');
 
-var MULTI_DENT = /^(\t+)(\.)?/,
-    SENTENCE = /([\w+\s+]+)/i;
+var MULTI_DENT = /^(\t+)(\.)?/,
+    IS_EXAMPLE_ROW = /^([\|\s+\S+]+\s+\|\s*)$/,
+    PARSE_EXAMPLE_ROW = /\|\s*(\S+)/gi,
+    SENTENCE = /([\w+\s+]+)/i;
 
 var Lexer = function (language, i18n) {
   // Set the i18n dictionary on the lexer for later use
@@ -44,13 +46,13 @@
   this.matchers = {};
   for (var index in this.headers) {
     var key = this.headers[index];
-    var base = '(' + this.keywords[key].join('|') + ')(\\:\\s*)';
+    var base = '^(' + this.keywords[key].join('|') + ')(\\:\\s*)';
     this.matchers[key] = new RegExp(base, 'i');
  }
 
   for (var index in this.operators) {
     var key = this.operators[index];
-    var base = '(' + this.keywords[key].join('|') + ')';
+    var base = '^(' + this.keywords[key].join('|') + ')';
     this.matchers[key] = new RegExp(base, 'i');
   }
 };
@@ -112,13 +114,21 @@
 
     return this.gherkinToken('background');
   },
 
-  examplesToken: function () {
-
+  examplesToken: function () {
     return this.gherkinToken('examples');
   },
 
   examplesRowToken: function () {
+    var match;
+    if (!(match = this.match(IS_EXAMPLE_ROW, 1))) {
+      return false;
+    }
+
+    this.i += match.length;
+    var matches = this.match(PARSE_EXAMPLE_ROW);
+    this.token('EXAMPLE_ROW', matches.map(function (item) { return item.replace('|','').trim() }));
+    return true;
   },
 
   operatorToken: function () {
diff --git a/lib/kyuri/parser.js b/lib/kyuri/parser.js
index 716e037..cc19329 100644
--- a/lib/kyuri/parser.js
+++ b/lib/kyuri/parser.js
@@ -1,4 +1,97 @@
-//
-// Stub this out until we have jison giving us something...
-//
-exports.parser = {};
\ No newline at end of file
+/*
+ * parser.js: Custom state machine based Parser for Kyuri
+ *
+ * (C) 2010 Charlie Robbins
+ * MIT LICENSE
+ *
+ */
+
+var Parser = function () {
+
+};
+
+var states = {
+  start: {
+    transitions: {
+      'FEATURE': {
+        value: 'FEATURE',
+        next: 'feature'
+      }
+    }
+  },
+
+  finish: {
+    transitions: { /* EMPTY */ }
+  },
+
+  fail: {
+    transitions: { /* EMPTY */ }
+  },
+
+  feature: {
+    transitions: {
+      'SENTENCE': {
+        value: '*',
+        next: 'featureHeader'
+      }
+    }
+  },
+
+  featureHeader: {
+    transitions: {
+      'TERMINATOR': {
+        value: '*',
+        next: 'featureHeader',
+        last: ['TERMINATOR', 'SENTENCE']
+      },
+      'SENTENCE': {
+        value: '*',
+        next: 'featureDescription',
+        last: 'TERMINATOR'
+      }
+    }
+  },
+
+  featureDescription: {
+    transitions: {
+      'TERMINATOR': {
+        value: '*',
+        next: 'featureDescription',
+        last: 'SENTENCE'
+      },
+      'SENTENCE': {
+        value: '*',
+        next: 'featureDescription',
+        last: 'TERMINATOR'
+      },
+      'INDENT': {
+        value: 1,
+        next: 'featureDescription',
+        last: 'TERMINATOR'
+      },
+      'SCENARIO': {
+        value: 'SCENARIO',
+        next: 'scenario',
+        last: 'INDENT'
+      }
+    }
+  },
+
+  scenario: {
+    transitions: {
+      'SENTENCE': {
+        value: '*',
+        next: 'scenarioHeader',
+        last: 'SCENARIO'
+      }
+    }
+  }
+};
+
+Parser.prototype = {
+  parse: function (tokens) {
+
+  }
+};
+
+exports.Parser = Parser;
\ No newline at end of file
diff --git a/test/lexer-test.js b/test/lexer-test.js
index ddaf889..9fff1ae 100644
--- a/test/lexer-test.js
+++ b/test/lexer-test.js
@@ -29,13 +29,13 @@ vows.describe('kyuri/lexer').addBatch({
         assert.isNotNull(data.toString());
         eyes.inspect(kyuri.tokens(data.toString()));
       }
-    },/*
+    },
     "lexing complex.feature": {
       topic: readAllLines(path.join(__dirname, '..', 'examples', 'complex.feature')),
       "should lex correctly": function (err, data) {
        assert.isNotNull(data.toString());
         eyes.inspect(kyuri.tokens(data.toString()));
       }
-    }*/
+    }
   }
 }).export(module);