Remove the output from the unit test.
Jason3S committed Feb 23, 2018
1 parent 4141ee9 commit c6b64e8
Showing 4 changed files with 48 additions and 52 deletions.
38 changes: 38 additions & 0 deletions src/grammar/display.ts
@@ -0,0 +1,38 @@
+import chalk, { Chalk } from 'chalk';
+import { Grammar } from './grammar';
+
+type ColorMap = [RegExp, Chalk][];
+
+const colorMap: ColorMap = [
+    [/ keyword/, chalk.yellow],
+    [/ entity.name/, chalk.blue],
+    [/ variable/, chalk.greenBright],
+    [/ string/, chalk.yellowBright],
+    [/comment/, chalk.dim.green],
+    [/ punctuation/, chalk.yellow],
+    [/support.function/, chalk.greenBright],
+    [/^source$/, chalk.gray]
+];
+
+export function colorize(text: string, scopes: string): string {
+    for (const [reg, fn] of colorMap) {
+        if (reg.test(scopes)) {
+            return fn(text);
+        }
+    }
+    return text;
+}
+
+export function *formatTokenizeText(text: string, grammar: Grammar, colorizer: (text: string, scopes: string) => string = colorize) {
+    for (const tr of grammar.tokenizeText(text.split('\n'))) {
+        const {line: rawLine, lineNumber, tokens} = tr;
+        const line = rawLine.replace(/\t/g, ' ');
+        yield `${lineNumber} ${line}`;
+        const results = tokens.map(t => ({ text: line.slice(t.startIndex, t.endIndex), scopes: t.scopes.join(' ')}));
+        const w = Math.max(...results.map(t => t.text.length));
+        for (const {text, scopes} of results) {
+            yield ` ${colorizer(text.padEnd(w + 2), scopes)} => ${scopes}`;
+        }
+        yield '';
+    }
+}
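
The new display module is consumed as a generator of printable lines. A minimal usage sketch follows, assuming the caller already has a Grammar instance the way the tests do; only formatTokenizeText, colorize, and the shape of their output are confirmed by this diff.

import { Grammar } from './grammar';
import { colorize, formatTokenizeText } from './display';

// Hypothetical driver, not part of this commit. Each yielded string is
// a numbered source line, an indented `token => scopes` pair, or a
// blank separator line.
function printTokens(grammar: Grammar, sourceText: string): void {
    for (const line of formatTokenizeText(sourceText, grammar, colorize)) {
        console.log(line);
    }
}
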
42 changes: 6 additions & 36 deletions src/grammar/grammar.test.ts
@@ -2,7 +2,7 @@ import { Grammar } from './grammar';
 import { expect } from 'chai';
 import * as path from 'path';
 import * as fs from 'fs-extra';
-import chalk, { Chalk } from 'chalk';
+import { formatTokenizeText } from './display';
 
 const javascriptGrammarFile = path.join(__dirname, '..', '..', 'samples', 'syntax', 'javascript.tmLanguage.json');
 const sampleJavascriptFile = path.join(__dirname, '..', '..', 'samples', 'src', 'sample.js');
@@ -23,7 +23,7 @@ describe('Validate Grammar', () => {
         const sampleFile = sampleGolangFile;
         const file = await fs.readFile(sampleFile, 'utf8');
         for (const s of formatTokenizeText(file, grammar)) {
-            console.log(s);
+            output(s);
         }
     });

@@ -45,44 +45,14 @@ describe('Validate Grammar', () => {
             expect(last).to.be.eq(line.length);
         }
         for (const s of formatTokenizeText(file, grammar)) {
-            console.log(s);
+            output(s);
         }
     });
 
 });
 
-type ColorMap = [RegExp, Chalk][];
-
-const colorMap: ColorMap = [
-    [/ keyword/, chalk.yellow],
-    [/ entity.name/, chalk.blue],
-    [/ variable/, chalk.greenBright],
-    [/ string/, chalk.yellowBright],
-    [/comment/, chalk.dim.green],
-    [/ punctuation/, chalk.yellow],
-    [/support.function/, chalk.greenBright],
-    [/^source$/, chalk.gray]
-];
-
-function colorize(text: string, scopes: string): string {
-    for (const [reg, fn] of colorMap) {
-        if (reg.test(scopes)) {
-            return fn(text);
-        }
-    }
-    return text;
+function output(text: string) {
+    expect(text).to.not.be.undefined;
+    // console.log(text);
 }
 
-function *formatTokenizeText(text: string, grammar: Grammar) {
-    for (const tr of grammar.tokenizeText(text.split('\n'))) {
-        const {line: rawLine, lineNumber, tokens} = tr;
-        const line = rawLine.replace(/\t/g, ' ');
-        yield `${lineNumber} ${line}`;
-        const results = tokens.map(t => ({ text: line.slice(t.startIndex, t.endIndex), scopes: t.scopes.join(' ')}));
-        const w = Math.max(...results.map(t => t.text.length));
-        for (const {text, scopes} of results) {
-            yield ` ${colorize(text.padEnd(w + 2), scopes)} => ${scopes}`;
-        }
-        yield '';
-    }
-}
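
The output helper above still consumes and checks every yielded line, so the grammar code is fully exercised while the test log stays quiet. If the verbose behavior is ever wanted locally, one variation is to gate the logging behind an environment variable; this is a sketch, and TEST_VERBOSE is an invented name, not something the commit defines.

function output(text: string) {
    expect(text).to.not.be.undefined;
    // Invented opt-in switch; the committed helper keeps the
    // console.log commented out unconditionally.
    if (process.env.TEST_VERBOSE) {
        console.log(text);
    }
}
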
12 changes: 4 additions & 8 deletions src/grammar/tokenize.test.ts
@@ -143,7 +143,7 @@ describe('Validate Tokenizer', () => {
         const text = `const x = 'it\\'s good'; // comment`;
         const r = tokenizeLine(text, rule);
         const tokens = r.tokens;
-        console.log(r);
+        // console.log(r);
         let startIndex = 0;
         for (const t of tokens) {
             expect(t.startIndex).to.equal(startIndex);
@@ -171,16 +171,12 @@ describe('Validate Tokenizer', () => {
         let rule = sampleJavascriptGrammarRule;
         expect(lines).to.be.not.empty;
         for (const line of lines) {
-            console.log(line);
             const r = tokenizeLine(line, rule);
-            r.tokens.forEach(t => {
-                const text = JSON.stringify(line.slice(t.startIndex, t.endIndex));
-                const scope = t.scopes.join(', ');
-                console.log(`${text} => ${scope}`);
-            });
+            if (line !== '') {
+                expect(r.tokens).to.not.be.empty;
+            }
             rule = r.state;
         }
-        expect(true).to.be.true;
     });
 
 });
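
Note the state threading this test depends on: tokenizeLine returns both the tokens for a line and the rule state the next line must start from. A compact illustration of that contract is below; tokenizeLine and sampleJavascriptGrammarRule come from the diff, but the input lines are invented.

// Multi-line constructs (e.g. block comments) only tokenize correctly
// when each line starts from the previous line's ending state.
const sampleLines = ['const x = 1;', '/* a', 'comment */'];
let state = sampleJavascriptGrammarRule;
for (const line of sampleLines) {
    const r = tokenizeLine(line, state);
    // r.tokens is an array of { startIndex, endIndex, scopes }.
    state = r.state;
}
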
8 changes: 0 additions & 8 deletions src/grammar/tokenize.ts
@@ -49,9 +49,6 @@ export function tokenizeLine(text: string, rule: Rule): TokenizeLineResult {
     while (offset < text.length) {
         const { match, rule: matchingRule } = matchRule(text, offset, rule);
         if (match && match.index <= endOffset) {
-            if (matchingRule.comment) {
-                console.log(matchingRule.comment);
-            }
             if (match.index > offset) {
                 tokens.push({ startIndex: offset, endIndex: match.index, scopes: extractScopes(rule) });
             }
@@ -101,13 +98,8 @@
         }
         if (offset === endOffset) {
             // process ending rule.
-            console.log(`End of match: ${rule.comment} EndRegEx: ${endMatch}`);
             if (rule.parent && endMatch) {
                 rule = findBoundingRule(rule);
-                if (isPatternBeginEnd(rule.pattern)) {
-                    const pattern = rule.pattern;
-                    console.log(`End ${rule.depth}: ${pattern.begin} <--> ${pattern.end} # ` + (pattern.name || pattern.comment || ''));
-                }
                 tokens.push(...tokenizeCapture(rule, endMatch, endCaptures(rule.pattern)));
                 offset = endMatch.index + endMatch[0].length;
             }
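
The tokenize.ts deletions strip ad-hoc debug printing out of the tokenizer loop entirely. If that visibility is wanted back without re-polluting test output, one option is a module-level debug switch; this is a sketch, and DEBUG_TOKENIZER is an invented name, not something the commit adds.

const DEBUG = !!process.env.DEBUG_TOKENIZER;

function debugLog(message: string): void {
    if (DEBUG) {
        console.log(message);
    }
}

// The deleted calls, such as
//   console.log(`End of match: ${rule.comment} EndRegEx: ${endMatch}`);
// would become debugLog(...) and stay silent during a normal test run.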
