Skip to content

Commit

Permalink
[Lexer] Add code, hr lexer unittests.
Browse files Browse the repository at this point in the history
  • Loading branch information
aquariuslt committed Sep 19, 2017
1 parent 6e824c3 commit 6f72512
Show file tree
Hide file tree
Showing 5 changed files with 224 additions and 28 deletions.
1 change: 0 additions & 1 deletion src/lexer/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,4 @@ export default class Lexer {
return $this.lexer.lex(source);
}


}
33 changes: 16 additions & 17 deletions src/shared/token.types.ts
Original file line number Diff line number Diff line change
@@ -1,18 +1,17 @@
export default class TokenTypes {
SPACE = 'space';
CODE = 'code';
HEADING = 'heading';
TABLE = 'table';
HR = 'hr';
BLOCKQUOTE_START = 'blockquote_start';
BLOCKQUOTE_END = 'blockquote_end';
LIST_START = 'list_start';
LOOSE_ITEM_START = 'loose_item_start';
LIST_ITEM_START = 'list_item_start';
LIST_ITEM_END = 'list_item_end';
LIST_END = 'list_end';
PARAGRAPH = 'paragraph';
HTML = 'html';
TEXT = 'text';

// Token type identifiers produced by the lexer.
// Plain constant map (replaces the former TokenTypes class): consumers compare
// `token.type` against these string values.
export default {
SPACE: 'space',
CODE: 'code',
HEADING: 'heading',
TABLE: 'table',
HR: 'hr',
// Blockquotes and lists are emitted as start/end token pairs that bracket
// their child tokens, mirroring marked-style block token streams.
BLOCKQUOTE_START: 'blockquote_start',
BLOCKQUOTE_END: 'blockquote_end',
LIST_START: 'list_start',
LOOSE_ITEM_START: 'loose_item_start',
LIST_ITEM_START: 'list_item_start',
LIST_ITEM_END: 'list_item_end',
LIST_END: 'list_end',
PARAGRAPH: 'paragraph',
HTML: 'html',
TEXT: 'text'
}
34 changes: 34 additions & 0 deletions test/unit/specs/lexer-code.spec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
import _ from 'lodash';
import Lexer from '@/lexer';

// Unit tests for indented code-block recognition: original Markdown treats a
// line indented by 4 spaces or one tab as a code block.
describe('lexer:code', () => {

it('should be using 4 space before to setup code blocks', () => {
// NOTE(review): the literal below should carry four leading spaces; the
// diff rendering may have collapsed them — verify against the repo file.
const mdString = ` This is a code block\n`;
let lexer = new Lexer();
let tokens = lexer.lex(mdString);

// Exactly one token, and it is a code token.
expect(tokens.length).to.eq(1);
expect(_.head(tokens).type).to.eq('code');
});

it('should be using 1 tab before to setup code blocks', () => {
const mdString = `\tThis is a code block\n`;
let lexer = new Lexer();
let tokens = lexer.lex(mdString);

expect(tokens.length).to.eq(1);
expect(_.head(tokens).type).to.eq('code');
});

it('should be using multi-line spaces to setup code blocks', () => {
// Consecutive tab-indented lines must merge into a single code token.
const mdString = `\ttell application "Foo"\n\t\tbeep\n\tend tell\n`;
let lexer = new Lexer();
let tokens = lexer.lex(mdString);

expect(tokens.length).to.eq(1);
expect(_.head(tokens).type).to.eq('code');
});

// TODO: add ```support```
});
67 changes: 67 additions & 0 deletions test/unit/specs/lexer-hr.spec.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@
import _ from 'lodash';
import Lexer from '@/lexer';

// Unit tests for horizontal-rule (hr) recognition: Markdown requires three or
// more *, -, or _ characters (optionally space-separated) on a line of their
// own; fewer than three must NOT produce an hr token.
describe('lexer:hr', () => {

  it('should lex spaced * as hr', () => {
    const mdString = `* * *`;
    const tokens = new Lexer().lex(mdString);

    expect(tokens.length).to.eq(1);
    expect(_.head(tokens).type).to.eq('hr');
  });

  it('should lex 3 continuous * as hr', () => {
    const mdString = `***`;
    const tokens = new Lexer().lex(mdString);

    expect(tokens.length).to.eq(1);
    expect(_.head(tokens).type).to.eq('hr');
  });

  it('should lex 5 continuous * as hr', () => {
    const mdString = `*****`;
    const tokens = new Lexer().lex(mdString);

    expect(tokens.length).to.eq(1);
    expect(_.head(tokens).type).to.eq('hr');
  });

  it('should lex spaced - as hr', () => {
    const mdString = `- - -`;
    const tokens = new Lexer().lex(mdString);

    expect(tokens.length).to.eq(1);
    expect(_.head(tokens).type).to.eq('hr');
  });

  it('should lex many continuous - as hr', () => {
    const mdString = `---------------------------------------`;
    const tokens = new Lexer().lex(mdString);

    expect(tokens.length).to.eq(1);
    expect(_.head(tokens).type).to.eq('hr');
  });

  // Negative cases: two markers are below the three-marker minimum.
  it('should not lex 2 (less than 3) spaced * as hr', () => {
    const mdString = `* *`;
    const tokens = new Lexer().lex(mdString);

    expect(_.head(tokens).type).not.to.eq('hr');
  });

  it('should not lex 2 (less than 3) continuous * as hr', () => {
    const mdString = `**`;
    const tokens = new Lexer().lex(mdString);

    expect(_.head(tokens).type).not.to.eq('hr');
  });
});
117 changes: 107 additions & 10 deletions test/unit/specs/lexer-list.spec.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import _ from 'lodash';
import {expect} from 'chai';
import Lexer from '@/lexer';
// import Tokens from '@/shared/tokens';

describe('lexer:lists', () => {

Expand All @@ -13,23 +12,121 @@ describe('lexer:lists', () => {
expect(tokens.length).to.eq(5);
expect(_.head(tokens).ordered).to.eq(false);
expect(_.head(tokens).type).to.eq('list_start');
expect(_.last(tokens).type).to.eq('list_end');
});

it('should be lex * as unordered list', () => {
  // A three-item list lexes to 11 tokens:
  // list_start + 3 x (list_item_start + text + list_item_end) + list_end.
  const mdString = `* Red\n* Green\n* Blue`;
  const lexer = new Lexer();
  const tokens = lexer.lex(mdString);

  expect(tokens.length).to.eq(11);
  expect(_.head(tokens).ordered).to.eq(false);
  expect(_.head(tokens).type).to.eq('list_start');
  expect(_.last(tokens).type).to.eq('list_end');
  expect(_.filter(tokens, (token) => {
    return _.isEqual(token.type, 'text');
  }).length).to.eq(3);
  expect(_.filter(tokens, (token) => {
    return _.isEqual(token.type, 'list_item_start');
  }).length).to.eq(3);
  expect(_.filter(tokens, (token) => {
    return _.isEqual(token.type, 'list_item_end');
  }).length).to.eq(3);
  expect(_.filter(tokens, (token) => {
    return _.isEqual(token.type, 'list_start');
  }).length).to.eq(1);
  expect(_.filter(tokens, (token) => {
    return _.isEqual(token.type, 'list_end');
  }).length).to.eq(1);
});

it('should be lex + as unordered list', () => {
  const tokens = new Lexer().lex(`+ Red\n+ Green\n+ Blue`);

  // Number of tokens of `type` in this list's token stream.
  const countType = (type) => _.filter(tokens, (token) => _.isEqual(token.type, type)).length;

  expect(tokens.length).to.eq(11);
  expect(_.head(tokens).ordered).to.eq(false);
  expect(_.head(tokens).type).to.eq('list_start');
  expect(_.last(tokens).type).to.eq('list_end');
  expect(countType('text')).to.eq(3);
  expect(countType('list_item_start')).to.eq(3);
  expect(countType('list_item_end')).to.eq(3);
  expect(countType('list_start')).to.eq(1);
  expect(countType('list_end')).to.eq(1);
});

it('should be lex - as unordered list', () => {
  const tokens = new Lexer().lex(`- Red\n- Green\n- Blue`);

  // Number of tokens of `type` in this list's token stream.
  const countType = (type) => _.filter(tokens, (token) => _.isEqual(token.type, type)).length;

  // 3 items x 3 tokens each, plus list_start and list_end.
  expect(tokens.length).to.eq(11);
  expect(_.head(tokens).ordered).to.eq(false);
  expect(_.head(tokens).type).to.eq('list_start');
  expect(_.last(tokens).type).to.eq('list_end');
  expect(countType('text')).to.eq(3);
  expect(countType('list_item_start')).to.eq(3);
  expect(countType('list_item_end')).to.eq(3);
  expect(countType('list_start')).to.eq(1);
  expect(countType('list_end')).to.eq(1);
});

it('should be lex order list with sequence prefix', () => {
  const tokens = new Lexer().lex(`1. Bird\n2. McHale\n3. Parish`);

  // Number of tokens of `type` in this list's token stream.
  const countType = (type) => _.filter(tokens, (token) => _.isEqual(token.type, type)).length;

  expect(tokens.length).to.eq(11);
  // `N.` prefixes mark the list as ordered.
  expect(_.head(tokens).ordered).to.eq(true);
  expect(_.head(tokens).type).to.eq('list_start');
  expect(_.last(tokens).type).to.eq('list_end');
  expect(countType('text')).to.eq(3);
  expect(countType('list_item_start')).to.eq(3);
  expect(countType('list_item_end')).to.eq(3);
  expect(countType('list_start')).to.eq(1);
  expect(countType('list_end')).to.eq(1);
});

it('should not be influenced by unsorted sequence', () => {
  const mdStringWithSortedSequence = `1. Bird\n2. McHale\n3. Parish`;
  const mdStringWithoutSortedSequence = `3. Bird\n1. McHale\n8. Parish`;
  const lexer = new Lexer();

  const sortedTokens = lexer.lex(mdStringWithSortedSequence);
  const unsortedTokens = lexer.lex(mdStringWithoutSortedSequence);

  // BUG FIX: `to.eq` asserts strict (===) equality, and two token arrays
  // from separate lex() calls are never the same reference, so the original
  // assertion could not pass regardless of lexer behavior. Deep equality
  // compares the token contents instead.
  expect(sortedTokens).to.deep.eq(unsortedTokens);
});

});

0 comments on commit 6f72512

Please sign in to comment.