Enable checkJs (#648)
* Add more ts-checks

* cover extattrs

* cover more

* Enable checkJs

* Add .js to import()

* TODO:
saschanaz committed Jan 12, 2022
1 parent 0c7c2e8 commit dc63472
Showing 29 changed files with 215 additions and 89 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
node_modules/
dist/*
!dist/package.json
.vscode/
3 changes: 2 additions & 1 deletion jsconfig.json
@@ -2,7 +2,8 @@
"compilerOptions": {
"target": "es2020",
"module": "es2020",
"moduleResolution": "node"
"moduleResolution": "node",
"checkJs": true
},
"include": [
"lib/"
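For context on the jsconfig.json change above: "checkJs": true makes the TypeScript compiler type-check plain .js files against their JSDoc annotations, which is what the rest of this commit prepares the lib/ sources for. A minimal sketch of the kind of mismatch it starts reporting (hypothetical file and names, not part of this commit):

    // example.js — hypothetical; only illustrates what "checkJs": true enables
    /**
     * @param {number} count
     * @returns {string}
     */
    function indent(count) {
      return " ".repeat(count);
    }

    // tsc now flags this call: Argument of type 'string' is not assignable
    // to parameter of type 'number'.
    indent("2");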
12 changes: 11 additions & 1 deletion lib/error.js
@@ -28,12 +28,16 @@ function contextAsText(node) {
* @typedef {object} WebIDL2ErrorOptions
* @property {"error" | "warning"} [level]
* @property {Function} [autofix]
* @property {string} [ruleName]
*
* @typedef {ReturnType<typeof error>} WebIDLErrorData
*
* @param {string} message error message
* @param {*} position
* @param {*} current
* @param {*} message
* @param {"Syntax" | "Validation"} kind error type
* @param {WebIDL2ErrorOptions} [options]
* @param {WebIDL2ErrorOptions=} options
*/
function error(
source,
@@ -52,6 +56,12 @@ function error(
: source.slice(Math.max(position + count, 0), position);
}

/**
* @param {import("./tokeniser.js").Token[]} inputs
* @param {object} [options]
* @param {boolean} [options.precedes]
* @returns
*/
function tokensToText(inputs, { precedes } = {}) {
const text = inputs.map((t) => t.trivia + t.value).join("");
const nextToken = source[position];
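A side note on the parameter documentation above: under checkJs both JSDoc spellings of an optional parameter are accepted, the bracketed name and the Closure-style "=" suffix on the type, and the hunk simply switches from one to the other. A tiny sketch with hypothetical names:

    /** @param {string} [label] bracket form: label is optional */
    function bracketForm(label) {}

    /** @param {string=} label Closure form: same meaning to the checker */
    function suffixForm(label) {}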
4 changes: 1 addition & 3 deletions lib/productions/argument.js
@@ -1,5 +1,3 @@
// @ts-check

import { Base } from "./base.js";
import { Default } from "./default.js";
import { ExtendedAttributes } from "./extended-attributes.js";
@@ -18,7 +16,7 @@ import {

export class Argument extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const start_position = tokeniser.position;
2 changes: 0 additions & 2 deletions lib/productions/array-base.js
@@ -1,5 +1,3 @@
// @ts-check

export class ArrayBase extends Array {
constructor({ source, tokens }) {
super();
2 changes: 0 additions & 2 deletions lib/productions/attribute.js
@@ -1,5 +1,3 @@
// @ts-check

import { validationError } from "../error.js";
import { idlTypeIncludesDictionary } from "../validators/helpers.js";
import { Base } from "./base.js";
2 changes: 0 additions & 2 deletions lib/productions/base.js
@@ -1,5 +1,3 @@
// @ts-check

export class Base {
/**
* @param {object} initializer
4 changes: 1 addition & 3 deletions lib/productions/callback-interface.js
@@ -1,12 +1,10 @@
// @ts-check

import { Container } from "./container.js";
import { Operation } from "./operation.js";
import { Constant } from "./constant.js";

export class CallbackInterface extends Container {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser, callback, { partial = null } = {}) {
const tokens = { callback };
5 changes: 1 addition & 4 deletions lib/productions/constructor.js
@@ -3,7 +3,7 @@ import { argument_list, autoParenter } from "./helpers.js";

export class Constructor extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const base = tokeniser.consume("constructor");
@@ -31,9 +31,6 @@ export class Constructor extends Base {
}

*validate(defs) {
if (this.idlType) {
yield* this.idlType.validate(defs);
}
for (const argument of this.arguments) {
yield* argument.validate(defs);
}
3 changes: 1 addition & 2 deletions lib/productions/container.js
@@ -18,9 +18,8 @@ function inheritance(tokeniser) {

export class Container extends Base {
/**
* @template T
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {T} instance
* @param {*} instance TODO: This should be {T extends Container}, but see https://github.com/microsoft/TypeScript/issues/4628
* @param {*} args
*/
static parse(tokeniser, instance, { inheritable, allowedMembers }) {
4 changes: 2 additions & 2 deletions lib/productions/default.js
@@ -3,7 +3,7 @@ import { const_data, const_value } from "./helpers.js";

export class Default extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const assign = tokeniser.consume("=");
@@ -50,7 +50,7 @@ export class Default extends Base {
return const_data(this.expression[0]).negative;
}

/** @param {import("../writer.js").Writer)} w */
/** @param {import("../writer.js").Writer} w */
write(w) {
return w.ts.wrap([
w.token(this.tokens.assign),
4 changes: 1 addition & 3 deletions lib/productions/dictionary.js
@@ -1,11 +1,9 @@
// @ts-check

import { Container } from "./container.js";
import { Field } from "./field.js";

export class Dictionary extends Container {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} [options]
* @param {import("../tokeniser.js").Token} [options.partial]
*/
6 changes: 3 additions & 3 deletions lib/productions/enum.js
@@ -2,9 +2,9 @@ import { list, unescape, autoParenter } from "./helpers.js";
import { WrappedToken } from "./token.js";
import { Base } from "./base.js";

class EnumValue extends WrappedToken {
export class EnumValue extends WrappedToken {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const value = tokeniser.consumeKind("string");
@@ -36,7 +36,7 @@ class EnumValue extends WrappedToken {

export class Enum extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
/** @type {Base["tokens"]} */
24 changes: 13 additions & 11 deletions lib/productions/extended-attributes.js
@@ -5,7 +5,7 @@ import { list, argument_list, autoParenter, unescape } from "./helpers.js";
import { validationError } from "../error.js";

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string} tokenName
*/
function tokens(tokeniser, tokenName) {
@@ -26,15 +26,17 @@ const shouldBeLegacyPrefixed = [
];

const renamedLegacies = new Map([
...shouldBeLegacyPrefixed.map((name) => [name, `Legacy${name}`]),
.../** @type {[string, string][]} */ (
shouldBeLegacyPrefixed.map((name) => [name, `Legacy${name}`])
),
["NamedConstructor", "LegacyFactoryFunction"],
["OverrideBuiltins", "LegacyOverrideBuiltIns"],
["TreatNullAs", "LegacyNullToEmptyString"],
]);

/**
* This will allow a set of extended attribute values to be parsed.
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
function extAttrListItems(tokeniser) {
for (const syntax of extAttrValueSyntax) {
@@ -48,9 +50,9 @@
);
}

class ExtendedAttributeParameters extends Base {
export class ExtendedAttributeParameters extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const tokens = { assign: tokeniser.consume("=") };
@@ -100,7 +102,7 @@ class ExtendedAttributeParameters extends Base {
return null;
}

/** @param {import("../writer.js").Writer)} w */
/** @param {import("../writer.js").Writer} w */
write(w) {
const { rhsType } = this;
return w.ts.wrap([
@@ -120,7 +122,7 @@

export class SimpleExtendedAttribute extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const name = tokeniser.consumeKind("identifier");
@@ -194,7 +196,7 @@ information.`;
}
}

/** @param {import("../writer.js").Writer)} w */
/** @param {import("../writer.js").Writer} w */
write(w) {
return w.ts.wrap([
w.ts.trivia(this.tokens.name.trivia),
@@ -226,13 +228,13 @@ function renameLegacyExtendedAttribute(extAttr) {
// seems to be used
export class ExtendedAttributes extends ArrayBase {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
const tokens = {};
tokens.open = tokeniser.consume("[");
if (!tokens.open) return new ExtendedAttributes({});
const ret = new ExtendedAttributes({ source: tokeniser.source, tokens });
if (!tokens.open) return ret;
ret.push(
...list(tokeniser, {
parser: SimpleExtendedAttribute.parse,
@@ -262,7 +264,7 @@ export class ExtendedAttributes extends ArrayBase {
}
}

/** @param {import("../writer.js").Writer)} w */
/** @param {import("../writer.js").Writer} w */
write(w) {
if (!this.length) return "";
return w.ts.wrap([
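About the /** @type {[string, string][]} */ ( ... ) wrapper introduced above: in checked JavaScript, a type comment followed by a parenthesized expression acts as a cast. Array.prototype.map() infers the mapped pairs as plain string[] rather than [string, string] tuples, which the Map constructor rejects, so the expression is asserted to the tuple-array type. A standalone sketch of the same pattern (hypothetical values):

    const names = ["Alpha", "Beta"];
    // Without the cast, map() would infer string[][], which new Map() rejects.
    const pairs = /** @type {[string, string][]} */ (
      names.map((name) => [name, `Legacy${name}`])
    );
    const renamed = new Map(pairs);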
2 changes: 1 addition & 1 deletion lib/productions/field.js
@@ -9,7 +9,7 @@ import { Default } from "./default.js";

export class Field extends Base {
/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
static parse(tokeniser) {
/** @type {Base["tokens"]} */
31 changes: 16 additions & 15 deletions lib/productions/helpers.js
@@ -17,7 +17,7 @@ export function unescape(identifier) {

/**
* Parses comma-separated list
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {object} args
* @param {Function} args.parser parser function for each item
* @param {boolean} [args.allowDangler] whether to allow dangling comma
@@ -46,7 +46,7 @@ export function list(tokeniser, { parser, allowDangler, listName = "list" }) {
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function const_value(tokeniser) {
return (
@@ -86,7 +86,7 @@ export function const_data({ type, value }) {
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function primitive_type(tokeniser) {
function integer_type() {
@@ -109,7 +109,7 @@
}

const { source } = tokeniser;
const num_type = integer_type(tokeniser) || decimal_type(tokeniser);
const num_type = integer_type() || decimal_type();
if (num_type) return num_type;
const base = tokeniser.consume(
"bigint",
@@ -124,7 +124,7 @@
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function argument_list(tokeniser) {
return list(tokeniser, {
@@ -134,8 +134,8 @@
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {string} typeName
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string=} typeName (TODO: See Type.type for more details)
*/
export function type_with_extended_attributes(tokeniser, typeName) {
const extAttrs = ExtendedAttributes.parse(tokeniser);
@@ -145,8 +145,8 @@
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {string} typeName
* @param {import("../tokeniser.js").Tokeniser} tokeniser
* @param {string=} typeName (TODO: See Type.type for more details)
*/
export function return_type(tokeniser, typeName) {
const typ = Type.parse(tokeniser, typeName || "return-type");
@@ -165,7 +165,7 @@
}

/**
* @param {import("../tokeniser").Tokeniser} tokeniser
* @param {import("../tokeniser.js").Tokeniser} tokeniser
*/
export function stringifier(tokeniser) {
const special = tokeniser.consume("stringifier");
@@ -202,8 +202,7 @@ export function getMemberIndentation(parentTrivia) {
}

/**
* @param {object} def
* @param {import("./extended-attributes.js").ExtendedAttributes} def.extAttrs
* @param {import("./interface.js").Interface} def
*/
export function autofixAddExposedWindow(def) {
return () => {
@@ -257,7 +256,7 @@ export function findLastIndex(array, predicate) {

/**
* Returns a proxy that auto-assign `parent` field.
* @template T
* @template {Record<string | symbol, any>} T
* @param {T} data
* @param {*} [parent] The object that will be assigned to `parent`.
* If absent, it will be `data` by default.
@@ -273,17 +272,18 @@
// `autoParenter(parse())` where the function may return nothing.
return data;
}
return new Proxy(data, {
const proxy = new Proxy(data, {
get(target, p) {
const value = target[p];
if (Array.isArray(value)) {
if (Array.isArray(value) && p !== "source") {
// Wraps the array so that any added items will also automatically
// get their `parent` values.
return autoParenter(value, target);
}
return value;
},
set(target, p, value) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/47357
target[p] = value;
if (!value) {
return true;
Expand All @@ -300,4 +300,5 @@ export function autoParenter(data, parent) {
return true;
},
});
return proxy;
}
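As the doc comment above says, autoParenter returns a proxy that auto-assigns the parent field; the new template constraint and the exclusion of the source property in the get trap are there so that machinery checks cleanly under checkJs. A rough usage sketch (hypothetical objects, not from this commit):

    const data = { tokens: {}, idlType: null };
    const proxied = autoParenter(data);
    const childType = { type: "attribute-type" };
    // Assigning through the proxy stamps the back-reference,
    // so afterwards childType.parent === data.
    proxied.idlType = childType;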
