From 071c26245965e0ce43c62d1f0abaa356d0a670b8 Mon Sep 17 00:00:00 2001 From: Sergio Oliveira Date: Mon, 5 Dec 2022 22:30:55 +0100 Subject: [PATCH 1/2] Refactor code and prepare CI to publish. --- .github/workflows/build_test.yml | 4 ++-- .gitignore | 17 +++++++++-------- .npmignore | 16 +++++++++++----- .npmrc | 4 ++++ package-lock.json | 25 +++++++++++++++++++++++++ package.json | 19 ++++++++++++------- src/{parser => }/config.ts | 0 src/{parser => }/index.ts | 0 src/{parser => }/parser.ts | 0 src/{parser => }/statementParser.ts | 0 src/{parser => }/tokenizer.ts | 0 src/{parser => }/utilities.ts | 0 test/parser.test.ts | 4 ++-- test/statementParser.test.ts | 4 ++-- test/tokenize.test.ts | 2 +- test/utilities.test.ts | 8 ++++---- tsconfig.json | 15 +++++---------- 17 files changed, 77 insertions(+), 41 deletions(-) create mode 100644 .npmrc rename src/{parser => }/config.ts (100%) rename src/{parser => }/index.ts (100%) rename src/{parser => }/parser.ts (100%) rename src/{parser => }/statementParser.ts (100%) rename src/{parser => }/tokenizer.ts (100%) rename src/{parser => }/utilities.ts (100%) diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index db3ad48..002e225 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -22,5 +22,5 @@ jobs: node-version: '16.x' cache: 'npm' - run: npm install - - run: npm run compile - - run: npm test + - run: npm run build + - run: npm run test diff --git a/.gitignore b/.gitignore index 6db501b..fdcd296 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,9 @@ -*.jrnl -*.log -out -node_modules -*.vsix -.project -.settings -.classpath +# Dependency directory +node_modules/ + +# Editors +.idea/ + +# Project build +lib/ +*.tgz diff --git a/.npmignore b/.npmignore index b03f0d1..b83754b 100644 --- a/.npmignore +++ b/.npmignore @@ -1,6 +1,12 @@ -*.ts -!*.d.ts -tsconfig.json -*.tgz -.npmrc* +# Exclude all files +* + +# Add lib/ +!lib/** +# Exclude .map in lib/ *.map + +# Add documentation +!LICENSE +!CHANGELOG.md +!README.md diff --git a/.npmrc b/.npmrc new file mode 100644 index 0000000..bbd9e9e --- /dev/null +++ b/.npmrc @@ -0,0 +1,4 @@ +always-auth=true +registry=https://registry.npmjs.org +//registry.npmjs.org/:_authToken=${PARSER_TOKEN} +//registry.npmjs.org/:username=${PARSER_USER} diff --git a/package-lock.json b/package-lock.json index e130876..620fa7f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -17,6 +17,7 @@ "@types/node": "^18.11.4", "jest": "^29.2.1", "jsonc-parser": "^3.2.0", + "rimraf": "^3.0.2", "ts-jest": "^29.0.3", "tslint": "^5.20.1", "typescript": "^4.8.4" @@ -4538,6 +4539,21 @@ "node": ">=10" } }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", @@ -8563,6 +8579,15 @@ "integrity": "sha512-J1l+Zxxp4XK3LUDZ9m60LRJF/mAe4z6a4xyabPHk7pvK5t35dACV32iIjJDFeWZFfZlO29w6SZ67knR0tHzJtQ==", "dev": true }, + "rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + 
"dev": true, + "requires": { + "glob": "^7.1.3" + } + }, "semver": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", diff --git a/package.json b/package.json index 9aba00b..e2d5fae 100644 --- a/package.json +++ b/package.json @@ -9,23 +9,27 @@ "scripting", "language" ], - "homepage": "https://github.com/ing-bank/vscode-psl#readme", + "homepage": "https://github.com/ing-bank/psl-parser#readme", "bugs": { - "url": "https://github.com/ing-bank/vscode-psl/issues" + "url": "https://github.com/ing-bank/psl-parser/issues" }, "license": "MIT", "author": "atiplea", "contributors": [ "SOliveira" ], - "main": "out/index.js", + "main": "lib/index.js", + "types": "lib/types/index.js", "repository": { "type": "git", - "url": "https://github.com/ing-bank/vscode-psl.git" + "url": "https://github.com/ing-bank/psl-parser.git" }, "scripts": { - "compile": "tsc -p ./", - "watch": "npm run compile -- -watch", + "clean": "rimraf lib", + "buildts": "tsc", + "build": "npm run clean && npm run buildts", + "lint": "tslint -c tslint.json --project .", + "watch": "npm run build -- -watch", "test": "jest" }, "dependencies": { @@ -39,6 +43,7 @@ "ts-jest": "^29.0.3", "tslint": "^5.20.1", "typescript": "^4.8.4", - "jsonc-parser": "^3.2.0" + "jsonc-parser": "^3.2.0", + "rimraf": "^3.0.2" } } diff --git a/src/parser/config.ts b/src/config.ts similarity index 100% rename from src/parser/config.ts rename to src/config.ts diff --git a/src/parser/index.ts b/src/index.ts similarity index 100% rename from src/parser/index.ts rename to src/index.ts diff --git a/src/parser/parser.ts b/src/parser.ts similarity index 100% rename from src/parser/parser.ts rename to src/parser.ts diff --git a/src/parser/statementParser.ts b/src/statementParser.ts similarity index 100% rename from src/parser/statementParser.ts rename to src/statementParser.ts diff --git a/src/parser/tokenizer.ts b/src/tokenizer.ts similarity index 100% rename from src/parser/tokenizer.ts rename to src/tokenizer.ts diff --git a/src/parser/utilities.ts b/src/utilities.ts similarity index 100% rename from src/parser/utilities.ts rename to src/utilities.ts diff --git a/test/parser.test.ts b/test/parser.test.ts index fff3924..995c779 100644 --- a/test/parser.test.ts +++ b/test/parser.test.ts @@ -1,5 +1,5 @@ -import * as parser from '../src/parser/parser'; -import * as tokenizer from '../src/parser/tokenizer'; +import * as parser from '../src/parser'; +import * as tokenizer from '../src/tokenizer'; function getMethod(methodString: string): parser.Method | undefined { const d = parser.parseText(methodString); diff --git a/test/statementParser.test.ts b/test/statementParser.test.ts index c1b15cb..4f9def3 100644 --- a/test/statementParser.test.ts +++ b/test/statementParser.test.ts @@ -1,8 +1,8 @@ import { BinaryOperator, DeclarationStatement, Expression, Identifier, MultiSet, NumericLiteral, PostCondition, Statement, StatementParser, StringLiteral, SyntaxKind, TypeIdentifier, Value, -} from '../src/parser/statementParser'; -import { getTokens, Token } from '../src/parser/tokenizer'; +} from '../src/statementParser'; +import { getTokens, Token } from '../src/tokenizer'; function parse(text: string) { return new StatementParser(getTokens(text)); diff --git a/test/tokenize.test.ts b/test/tokenize.test.ts index 752928c..1dfb420 100644 --- a/test/tokenize.test.ts +++ b/test/tokenize.test.ts @@ -1,4 +1,4 @@ -import {getTokens, Type} from '../src/parser/tokenizer'; +import {getTokens, Type} from '../src/tokenizer'; test('pipe token', () => { let 
tokenizer = getTokens('|'); diff --git a/test/utilities.test.ts b/test/utilities.test.ts index a6f0a68..06d784c 100644 --- a/test/utilities.test.ts +++ b/test/utilities.test.ts @@ -1,8 +1,8 @@ import * as path from 'path'; -import { FinderPaths } from '../src/parser/config'; -import { MemberClass, ParsedDocument, parseFile } from '../src/parser/parser'; -import * as tokenizer from '../src/parser/tokenizer'; -import * as utilities from '../src/parser/utilities'; +import { FinderPaths } from '../src/config'; +import { MemberClass, ParsedDocument, parseFile } from '../src/parser'; +import * as tokenizer from '../src/tokenizer'; +import * as utilities from '../src/utilities'; function getTokens(str: string): tokenizer.Token[] { return [...tokenizer.getTokens(str)]; diff --git a/tsconfig.json b/tsconfig.json index c047352..7b85e00 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -5,16 +5,11 @@ "lib": [ "es2017" ], - "outDir": "out", - "declaration": true, + "outDir": "lib", "sourceMap": true, - "rootDir": "." - }, - "formatCodeOptions": { - "newLine": "LF" + "declaration": true, + "declarationDir": "lib/types", + "rootDir": "src/" }, - "exclude": [ - "node_modules", - "out" - ] + "include": ["src/**/*"] } From 002051d7800e83e5b6dc7a7193a4c6e245c283f1 Mon Sep 17 00:00:00 2001 From: Sergio Oliveira Date: Tue, 6 Dec 2022 22:22:07 +0100 Subject: [PATCH 2/2] Fix code inspections findings. --- .dictionaries/README.md | 21 ++++++++ .dictionaries/words.dic | 4 ++ .github/workflows/build_test.yml | 2 +- .npmignore | 3 +- .vscode/settings.json | 9 ++-- package.json | 6 +-- src/parser.ts | 13 +++-- src/statementParser.ts | 8 +-- src/tsconfig.json | 7 +++ test/parser.test.ts | 14 +++--- test/statementParser.test.ts | 12 ++--- test/tokenize.test.ts | 75 ++++++++++++++--------------- test/tsconfig.json | 8 +++ test/utilities.test.ts | 12 ++--- tsconfig.json => tsconfig-base.json | 14 +++--- 15 files changed, 124 insertions(+), 84 deletions(-) create mode 100644 .dictionaries/README.md create mode 100644 .dictionaries/words.dic create mode 100644 src/tsconfig.json create mode 100644 test/tsconfig.json rename tsconfig.json => tsconfig-base.json (51%) diff --git a/.dictionaries/README.md b/.dictionaries/README.md new file mode 100644 index 0000000..95129aa --- /dev/null +++ b/.dictionaries/README.md @@ -0,0 +1,21 @@ +# Dictionaries + +This folder contains dictionaries to be used by spell checkers programs to +validate the special words used in this project. + +## Available dictionaries + +### Words + +Contains words used in this project. + +File: [words.dic](words.dic) + +## File format + +Plain text files with the dic extension, containing words separated with a +newline. + +## Adding words guidelines + +Please only add words that make sense, and in the appropriate dictionary. 
diff --git a/.dictionaries/words.dic b/.dictionaries/words.dic new file mode 100644 index 0000000..480e13d --- /dev/null +++ b/.dictionaries/words.dic @@ -0,0 +1,4 @@ +CLASSDEF +ENDDOC +PROPERTYDEF +REVHIST diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml index 002e225..580cb17 100644 --- a/.github/workflows/build_test.yml +++ b/.github/workflows/build_test.yml @@ -23,4 +23,4 @@ jobs: cache: 'npm' - run: npm install - run: npm run build - - run: npm run test + - run: npm test diff --git a/.npmignore b/.npmignore index b83754b..fc22490 100644 --- a/.npmignore +++ b/.npmignore @@ -3,8 +3,9 @@ # Add lib/ !lib/** -# Exclude .map in lib/ +# Exclude exceptions from lib/ *.map +*.tsbuildinfo # Add documentation !LICENSE diff --git a/.vscode/settings.json b/.vscode/settings.json index 7351568..4ed3209 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,10 +1,9 @@ -// Place your settings in this file to overwrite default and user settings. { "files.exclude": { - "out": false // set this to true to hide the "out" folder with the compiled JS files + "lib": false }, "search.exclude": { - "out": true // set this to false to include "out" folder in search results + "lib": true }, - "editor.rulers": [120] // Recommended typescript ruler -} \ No newline at end of file + "editor.rulers": [120] +} diff --git a/package.json b/package.json index e2d5fae..6d0819f 100644 --- a/package.json +++ b/package.json @@ -25,11 +25,11 @@ "url": "https://github.com/ing-bank/psl-parser.git" }, "scripts": { - "clean": "rimraf lib", - "buildts": "tsc", - "build": "npm run clean && npm run buildts", + "prebuild": "rimraf lib", + "build": "tsc --build src", "lint": "tslint -c tslint.json --project .", "watch": "npm run build -- -watch", + "pretest": "tsc --build src", "test": "jest" }, "dependencies": { diff --git a/src/parser.ts b/src/parser.ts index d809703..e6cfa06 100644 --- a/src/parser.ts +++ b/src/parser.ts @@ -157,7 +157,7 @@ export interface ParsedDocument { declarations: Declaration[]; /** - * An array of PROPERTYDEFs + * An array of PROPERTYDEF */ properties: Property[]; @@ -328,16 +328,17 @@ class Parser { if (documentation) this.activeMethod.documentation = documentation; } } - else if (this.activeToken.isNewLine()) continue; - else this.throwAwayTokensTil(Type.NewLine); + else if (!this.activeToken.isNewLine()) { + this.throwAwayTokensTil(Type.NewLine); + } } return { comments: this.comments, declarations: this.declarations, extending: this.extending, - pslPackage: this.pslPackage, methods: this.methods, properties: this.properties, + pslPackage: this.pslPackage, tokens: this.tokens, }; } @@ -580,7 +581,9 @@ class Parser { } private throwAwayTokensTil(type: Type) { - while (this.next() && this.activeToken.type !== type); + while (this.next()) { + if (this.activeToken.type === type) break; + } } private loadTokenBuffer() { diff --git a/src/statementParser.ts b/src/statementParser.ts index 2b1ced9..be9b632 100644 --- a/src/statementParser.ts +++ b/src/statementParser.ts @@ -1,4 +1,4 @@ -import { getTokens, Token, Type } from './tokenizer'; +import {getTokens, Token, Type} from './tokenizer'; export enum SyntaxKind { ASSIGNMENT, @@ -402,7 +402,7 @@ export class StatementParser { const spaceOrExpression = this.activeToken; if (spaceOrExpression.isSpace()) { this.next(); - return forStatement; // argumentless for + return forStatement; // argument less for } const expression = this.parseExpression(); if (expression) forStatement.expressions.push(expression); @@ -484,13 
+484,12 @@ export class StatementParser { while (this.activeToken && this.activeToken.isColon()) { const colonToken = this.activeToken; this.next(true); - const colon: BinaryOperator = { + rootNode = { kind: SyntaxKind.BINARY_OPERATOR, left: rootNode, operator: [colonToken], right: this.parseValue(), }; - rootNode = colon; } return rootNode; } @@ -747,6 +746,7 @@ export function forEachChild(node: Node, f: (n: Node) => boolean) { const declaration = node as DeclarationStatement; if (declaration.args) declaration.args.forEach(arg => forEachChild(arg, f)); f(declaration.type); + break; case SyntaxKind.NUMERIC_LITERAL: case SyntaxKind.STRING_LITERAL: f(node); diff --git a/src/tsconfig.json b/src/tsconfig.json new file mode 100644 index 0000000..85043ff --- /dev/null +++ b/src/tsconfig.json @@ -0,0 +1,7 @@ +{ + "extends": "../tsconfig-base.json", + "compilerOptions": { + "composite": true, + } +} + diff --git a/test/parser.test.ts b/test/parser.test.ts index 995c779..a18d186 100644 --- a/test/parser.test.ts +++ b/test/parser.test.ts @@ -209,7 +209,7 @@ describe('Argument Names', () => { expect(argNameValues).toEqual(['x1', 'x2']); }); - test('1 argument multitype', () => { + test('1 argument multi type', () => { const methodString = 'public static void main(void x1(Integer, Record))'; const result = getMethod(methodString); if (!result) { @@ -220,7 +220,7 @@ describe('Argument Names', () => { expect(argNameValues).toEqual(['x1']); }); - test('2 argument multitype', () => { + test('2 argument multi type', () => { const methodString = 'public static void main(void x1(Integer, Record), void x2(void, String))'; const result = getMethod(methodString); if (!result) { @@ -231,7 +231,7 @@ describe('Argument Names', () => { expect(argNameValues).toEqual(['x1', 'x2']); }); - test('2 argument multitype', () => { + test('2 argument multiline and multi type', () => { const methodString = 'public static void main(void x1(Integer, Record)\n\t, void x2(void, String))'; const result = getMethod(methodString); if (!result) { @@ -299,7 +299,7 @@ describe('Argument Types', () => { expect(argValues).toEqual([['String']]); }); - test('1 argument multitype', () => { + test('1 argument multi type', () => { const methodString = 'public static void main(String x1(Number))'; const result = getMethod(methodString); if (!result) { @@ -681,7 +681,7 @@ describe('type declarations', () => { expect(doc.declarations[0].types[0].value).toEqual('String'); expect(doc.declarations[0].id.value).toEqual('x'); }); - test('mutliple type declaration', () => { + test('multiple type declaration', () => { const declarationString = '\ttype public literal String x,y'; const doc = getParsedDoc(declarationString); expect(doc.declarations[0].types[0].value).toEqual('String'); @@ -689,7 +689,7 @@ describe('type declarations', () => { expect(doc.declarations[1].types[0].value).toEqual('String'); expect(doc.declarations[1].id.value).toEqual('y'); }); - test('mutliple multitype type declaration', () => { + test('multiple multi type type declaration', () => { const declarationString = '\ttype public literal String x(Number,Boolean),y'; const doc = getParsedDoc(declarationString); expect(doc.declarations[0].types[0].value).toEqual('String'); @@ -699,7 +699,7 @@ describe('type declarations', () => { expect(doc.declarations[1].types[0].value).toEqual('String'); expect(doc.declarations[1].id.value).toEqual('y'); }); - test('mutliple type declaration equal sign', () => { + test('multiple type declaration equal sign', () => { const declarationString = 
'\ttype String x = "hi", y = "hi"'; const doc = getParsedDoc(declarationString); expect(doc.declarations[0].types[0].value).toEqual('String'); diff --git a/test/statementParser.test.ts b/test/statementParser.test.ts index 4f9def3..df732ac 100644 --- a/test/statementParser.test.ts +++ b/test/statementParser.test.ts @@ -1,7 +1,7 @@ import { BinaryOperator, DeclarationStatement, Expression, Identifier, MultiSet, NumericLiteral, PostCondition, Statement, StatementParser, StringLiteral, SyntaxKind, TypeIdentifier, Value, -} from '../src/statementParser'; +} from '../src'; import { getTokens, Token } from '../src/tokenizer'; function parse(text: string) { @@ -126,18 +126,18 @@ describe('recursive tests', () => { expect(start.id).toBe(parser.tokens[2]); }); test('Runtime start', () => { - const parser = parse('Runtime.start("BA",varlist)'); + const parser = parse('Runtime.start("BA",varList)'); const dotNode = parser.parseExpression() as BinaryOperator; const runtime = dotNode.left as Identifier; const start = dotNode.right as Identifier; const args = start.args as Value[]; const ba = args[0]; - const varlist = args[1]; + const varList = args[1]; expect(dotNode.kind === SyntaxKind.BINARY_OPERATOR); expect(runtime.id).toBe(parser.tokens[0]); expect((start).id).toBe(parser.tokens[2]); expect(ba.id).toBe(parser.tokens[5]); - expect(varlist.id).toBe(parser.tokens[8]); + expect(varList.id).toBe(parser.tokens[8]); }); test('grandchild', () => { const parser = parse('a.b.c'); @@ -545,7 +545,7 @@ describe('recursive tests', () => { expect(i.id.value).toBe('i'); expect(initial.id.value).toBe('1'); }); - test('argumentless for loop', () => { + test('argument less for loop', () => { const parser = parse('for set x = 1'); const statements = parser.parseLine(); const forStatement = statements[0]; @@ -563,7 +563,7 @@ describe('recursive tests', () => { expect(args.length).toBe(3); }); test('for order', () => { - const parser = parse('for set seq=tras(seq).order() quit:seq.isNull() do set(tras(seq))'); + const parser = parse('for set seq=array(seq).order() quit:seq.isNull() do set(array(seq))'); const statements = parser.parseLine(); const setStatement = statements[1]; const equal = setStatement.expressions[0] as BinaryOperator; diff --git a/test/tokenize.test.ts b/test/tokenize.test.ts index 1dfb420..7da8216 100644 --- a/test/tokenize.test.ts +++ b/test/tokenize.test.ts @@ -1,21 +1,20 @@ import {getTokens, Type} from '../src/tokenizer'; test('pipe token', () => { - let tokenizer = getTokens('|'); + const tokenizer = getTokens('|'); expect(tokenizer.next().value).toEqual({type: Type.Pipe, value: '|', position: {line: 0, character: 0}}); expect(tokenizer.next().value).toBeUndefined(); -}) +}); test('property def', () => { - let tokenizer = getTokens('#PROPERTYDEF'); + const tokenizer = getTokens('#PROPERTYDEF'); expect(tokenizer.next().value).toEqual({type: Type.NumberSign, value: '#', position: {line: 0, character: 0}}); expect(tokenizer.next().value).toEqual({type: Type.Alphanumeric, value: 'PROPERTYDEF', position: {line: 0, character: 1}}); expect(tokenizer.next().value).toBeUndefined(); -}) - +}); test('property def full', () => { - let tokenizer = getTokens('\t#PROPERTYDEF dummy\t\t\tclass = String\tpublic position = 2'); + const tokenizer = getTokens('\t#PROPERTYDEF dummy\t\t\tclass = String\tpublic position = 2'); expect(tokenizer.next().value).toEqual({type: Type.Tab, value: '\t', position: {line: 0, character: 0}}); expect(tokenizer.next().value).toEqual({type: Type.NumberSign, value: '#', position: 
{line: 0, character: 1}}); expect(tokenizer.next().value).toEqual({type: Type.Alphanumeric, value: 'PROPERTYDEF', position: {line: 0, character: 2}}); @@ -38,24 +37,24 @@ test('property def full', () => { expect(tokenizer.next().value).toEqual({type: Type.Space, value: ' ', position: {line: 0, character: 54}}); expect(tokenizer.next().value).toEqual({type: Type.Numeric, value: '2', position: {line: 0, character: 55}}); expect(tokenizer.next().value).toBeUndefined(); -}) +}); test('numeric', () => { - let tokenizer = getTokens('1'); + const tokenizer = getTokens('1'); expect(tokenizer.next().value).toEqual({type: Type.Numeric, value: '1', position: {line: 0, character: 0}}); expect(tokenizer.next().value).toBeUndefined(); -}) +}); test('whitespace', () => { - let tabTokenizer = getTokens('\t'); + const tabTokenizer = getTokens('\t'); expect(tabTokenizer.next().value).toEqual({type: Type.Tab, value: '\t', position: {line: 0, character: 0}}); expect(tabTokenizer.next().value).toBeUndefined(); - let spaceTokenizer = getTokens(' '); + const spaceTokenizer = getTokens(' '); expect(spaceTokenizer.next().value).toEqual({type: Type.Space, value: ' ', position: {line: 0, character: 0}}); expect(spaceTokenizer.next().value).toEqual({type: Type.Space, value: ' ', position: {line: 0, character: 1}}); expect(spaceTokenizer.next().value).toBeUndefined(); -}) +}); test('line comment', () => { let tokenizer = getTokens('//line comment'); @@ -80,7 +79,7 @@ test('line comment', () => { expect(tokenizer.next().value).toEqual({type: Type.LineComment, value: '', position: {line: 0, character: 2}}); expect(tokenizer.next().value).toEqual({type: Type.NewLine, value: '\n', position: {line: 0, character: 2}}); expect(tokenizer.next().value).toBeUndefined(); -}) +}); test('block comment', () => { let tokenizer = getTokens('/*a block* / comment*/ alphanumeric'); @@ -96,16 +95,16 @@ test('block comment', () => { expect(tokenizer.next().value).toEqual({type: Type.BlockComment, value: '', position: {line: 0, character: 2}}); expect(tokenizer.next().value).toEqual({type: Type.BlockCommentTerm, value: '*/', position: {line: 0, character: 2}}); expect(tokenizer.next().value).toBeUndefined(); -}) +}); test('documentation block comment', () => { - let tokenizer = getTokens('\t/*DOC -----------------------------------------------------------------\n\tdocumentation\n\t** ENDDOC */'); + const tokenizer = getTokens('\t/*DOC -----------------------------------------------------------------\n\tdocumentation\n\t** ENDDOC */'); expect(tokenizer.next().value).toEqual({type: Type.Tab, value: '\t', position: {line: 0, character: 0}}); expect(tokenizer.next().value).toEqual({type: Type.BlockCommentInit, value: '/*', position: {line: 0, character: 1}}); expect(tokenizer.next().value).toEqual({type: Type.BlockComment, value: 'DOC -----------------------------------------------------------------\n\tdocumentation\n\t** ENDDOC ', position: {line: 0, character: 3}}); expect(tokenizer.next().value).toEqual({type: Type.BlockCommentTerm, value: '*/', position: {line: 2, character: 11}}); expect(tokenizer.next().value).toBeUndefined(); -}) +}); test('string', () => { let tokenizer = getTokens('"this is a string"'); @@ -134,38 +133,38 @@ test('string', () => { expect(tokenizer.next().value).toEqual({type: Type.String, value: 'eggs\nflour\nmilk', position: {line: 0, character: 1}}); expect(tokenizer.next().value).toEqual({type: Type.DoubleQuotes, value: '"', position: {line: 2, character: 4}}); expect(tokenizer.next().value).toBeUndefined(); -}) 
+}); test('carriage return line feed', () => { - let tokenizer = getTokens('\r\n') - let tokens = []; - for (let token of tokenizer) { + const tokenizer = getTokens('\r\n'); + const tokens = []; + for (const token of tokenizer) { tokens.push(token); } expect(tokens).toHaveLength(2); - expect(tokens[0].value).toBe('\r') -}) + expect(tokens[0].value).toBe('\r'); +}); test('comment newline', () => { - let tokenizer = getTokens('// this is a comment\n') - let tokens = []; - for (let token of tokenizer) { + const tokenizer = getTokens('// this is a psl comment\n'); + const tokens = []; + for (const token of tokenizer) { tokens.push(token); } - expect(tokens[0].type).toBe(Type.LineCommentInit) - expect(tokens[1].type).toBe(Type.LineComment) - expect(tokens[1].value).toBe(' this is a comment') - expect(tokens[2].type).toBe(Type.NewLine) -}) + expect(tokens[0].type).toBe(Type.LineCommentInit); + expect(tokens[1].type).toBe(Type.LineComment); + expect(tokens[1].value).toBe(' this is a psl comment'); + expect(tokens[2].type).toBe(Type.NewLine); +}); test('comment with semicolon', () => { - let tokenizer = getTokens('; this is a comment\n') - let tokens = []; - for (let token of tokenizer) { + const tokenizer = getTokens('; this is a mumps comment\n'); + const tokens = []; + for (const token of tokenizer) { tokens.push(token); } - expect(tokens[0].type).toBe(Type.LineCommentInit) - expect(tokens[1].type).toBe(Type.LineComment) - expect(tokens[1].value).toBe(' this is a comment') - expect(tokens[2].type).toBe(Type.NewLine) -}) + expect(tokens[0].type).toBe(Type.LineCommentInit); + expect(tokens[1].type).toBe(Type.LineComment); + expect(tokens[1].value).toBe(' this is a mumps comment'); + expect(tokens[2].type).toBe(Type.NewLine); +}); diff --git a/test/tsconfig.json b/test/tsconfig.json new file mode 100644 index 0000000..1a0a2fb --- /dev/null +++ b/test/tsconfig.json @@ -0,0 +1,8 @@ +{ + "extends": "../tsconfig-base.json", + "references": [ + { + "path": "../src" + } + ] +} diff --git a/test/utilities.test.ts b/test/utilities.test.ts index 06d784c..f1c363a 100644 --- a/test/utilities.test.ts +++ b/test/utilities.test.ts @@ -1,6 +1,6 @@ import * as path from 'path'; +import { MemberClass, ParsedDocument, parseFile } from '../src'; import { FinderPaths } from '../src/config'; -import { MemberClass, ParsedDocument, parseFile } from '../src/parser'; import * as tokenizer from '../src/tokenizer'; import * as utilities from '../src/utilities'; @@ -69,7 +69,7 @@ describe('completion', () => { expect(result[1].value).toBe('b'); }); test('dot with parens content with parens', () => { - const tokensOnLine: tokenizer.Token[] = getTokens('a(blah(bleh())).b'); + const tokensOnLine: tokenizer.Token[] = getTokens('a(blah(blah())).b'); const index = 10; const result = utilities.getCallTokens(tokensOnLine, index); expect(result[0].value).toBe('a'); @@ -82,7 +82,7 @@ describe('completion', () => { expect(result[0].value).toBe('a'); expect(result[1].value).toBe('.'); }); - test('clusterfuck', () => { + test('clusterDuck', () => { const tokensOnLine: tokenizer.Token[] = getTokens('a.b().c(x(y)).d'); const index = 14; const result = utilities.getCallTokens(tokensOnLine, index); @@ -91,7 +91,7 @@ describe('completion', () => { expect(result[2].value).toBe('c'); expect(result[3].value).toBe('d'); }); - test('clusterfuck2', () => { + test('clusterDuck2', () => { const tokensOnLine: tokenizer.Token[] = getTokens('a.b().c(x(y)).d'); const index = 14; const result = utilities.getCallTokens(tokensOnLine, index); @@ -132,7 
+132,7 @@ describe('ParsedDocFinder', () => { projectPsl: [filesDir], tables: [], }; - } + }; beforeAll(async () => { filesDir = path.resolve('test', 'files'); @@ -171,7 +171,7 @@ describe('ParsedDocFinder', () => { expect(result.fsPath).toBe(childFilePath); }); - test('Find method overriden method in child', async () => { + test('Find method overridden method in child', async () => { const paths = getPaths(childFilePath); const finder: utilities.ParsedDocFinder = new utilities.ParsedDocFinder(parsedChild, paths); const result = await searchParser(finder, 'methodInParentAndChild', { character: 0, line: 0 }); diff --git a/tsconfig.json b/tsconfig-base.json similarity index 51% rename from tsconfig.json rename to tsconfig-base.json index 7b85e00..fe77f5d 100644 --- a/tsconfig.json +++ b/tsconfig-base.json @@ -1,15 +1,13 @@ { "compilerOptions": { - "module": "commonjs", - "target": "es2017", + "declaration": true, + "declarationDir": "./lib/types", "lib": [ "es2017" ], - "outDir": "lib", + "module": "commonjs", + "outDir": "./lib", "sourceMap": true, - "declaration": true, - "declarationDir": "lib/types", - "rootDir": "src/" - }, - "include": ["src/**/*"] + "target": "es2017" + } }
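
---
For context on what the relocated entry point exposes after this series, a minimal consumption sketch (not part of the patches themselves). It assumes the published package name psl-parser (per the new homepage/repository URLs), that lib/index.js re-exports parseText from src/parser.ts the way the test helpers appear to use it, and that a leading tab marks a type declaration as in test/parser.test.ts; any name not visible in the diffs is an assumption.

    // Hypothetical usage sketch; "psl-parser" and the parseText re-export are assumptions.
    import { parseText } from 'psl-parser';

    // Parse a one-line PSL type declaration (input taken from test/parser.test.ts).
    const doc = parseText('\ttype public literal String x');

    // Fields returned by Parser.parse() in src/parser.ts:
    // comments, declarations, extending, methods, properties, pslPackage, tokens.
    console.log(doc.declarations[0].types[0].value); // "String"
    console.log(doc.declarations[0].id.value);       // "x"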