Skip to content

Commit

Permalink
Import "as", AST explorer (#123)
Browse files Browse the repository at this point in the history
* implement an ast explorer

* update docs deps

* implement import as

* For both relative and absolute imports

* fix linker deps
  • Loading branch information
ballercat committed Jul 4, 2018
1 parent dc3a6c4 commit 2d40f1d
Show file tree
Hide file tree
Showing 31 changed files with 10,921 additions and 8,024 deletions.
5 changes: 4 additions & 1 deletion .gitignore
Expand Up @@ -37,4 +37,7 @@ jspm_packages

# Optional REPL history
.node_repl_history
.idea/
.idea/

# AST Debug folder
ast_view
7,906 changes: 4,143 additions & 3,763 deletions docs/explorer.js

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion docs/explorer.js.map

Large diffs are not rendered by default.

2 changes: 2 additions & 0 deletions packages/walt-buildtools/print.js
@@ -0,0 +1,2 @@
// Thin package entry point: re-export the printer implementation.
module.exports = require("./src/print");
24 changes: 20 additions & 4 deletions packages/walt-buildtools/src/patches.js
@@ -1,4 +1,5 @@
"use strict";
const print = require("./print");
const invariant = require("invariant");

// Patch missing type imports with the given dependencies
Expand All @@ -12,6 +13,19 @@ function inferImportTypes(ast, deps, compiler) {
Pair(pair, _) {
return pair;
},
BinaryExpression(binary, transform) {
// "as" keywords only
if (binary.value !== "as") {
return binary;
}

return Object.assign({}, binary, {
params: [
transform(binary.params[0]),
binary.params[1]
]
});
},
// Fix any identifiers here
Identifier(identifier, _) {
const depAST = deps[module.value].ast;
Expand All @@ -21,7 +35,12 @@ function inferImportTypes(ast, deps, compiler) {
types,
userTypes,
} = depAST.meta.AST_METADATA;

const fun = functions[identifier.value];
const glbl = globals[identifier.value];
const externType = types[identifier.value];
const userType = userTypes[identifier.value];

if (fun != null) {
// function arguments and params are _always_ the first two params
const [args, result] = fun.params;
Expand Down Expand Up @@ -64,7 +83,7 @@ function inferImportTypes(ast, deps, compiler) {
newTypes.push(newType);

// for an import to become valid at this point it only needs to be an
// identifier : identifier pair :)
// <identifier : identifier> pair :)
const patched = Object.assign({}, identifier, {
value: ":",
params: [
Expand All @@ -77,7 +96,6 @@ function inferImportTypes(ast, deps, compiler) {
return patched;
}

const glbl = globals[identifier.value];
if (glbl != null) {
// just set to the global type pair and peace out
return Object.assign({}, identifier, {
Expand All @@ -96,7 +114,6 @@ function inferImportTypes(ast, deps, compiler) {

// Unlike function types, user defined types are only needed for the
// compiler to produce a valid binary.
const externType = types[identifier.value];
if (externType != null) {
invariant(
externType.meta.EXPORTED,
Expand All @@ -108,7 +125,6 @@ function inferImportTypes(ast, deps, compiler) {
newTypes.push(Object.assign({}, externType));
}

const userType = userTypes[identifier.value];
if (userType != null) {
invariant(
userType.meta.EXPORTED,
Expand Down
28 changes: 28 additions & 0 deletions packages/walt-buildtools/src/print.js
@@ -0,0 +1,28 @@
// Print nodes like JSX elements
"use strict";

// Default truncation threshold for node values in printed output.
const MAX_VALUE_LENGTH = 60;

// Return a node's value for display, truncating with an ellipsis marker when
// it exceeds maxLength. Values that fit (including exactly maxLength chars)
// are returned unchanged — the original appended "..." to a 60-char value
// even though nothing had been cut off.
function getValue(node, maxLength = MAX_VALUE_LENGTH) {
  if (node.value.length <= maxLength) {
    return node.value;
  }

  return node.value.slice(0, maxLength) + "...";
}

// Render an AST node (and, recursively, its params) as a JSX-like string for
// debugging. Leaf nodes (no params) become self-closing tags; nodes with
// params nest their printed children, indented one extra space per level.
// Null/undefined entries in params are skipped.
function print(node) {
  if (!node.params.length) {
    return `<${node.Type} value="${getValue(node)}" />`;
  }

  // Print each child, then shift every line of its output right by one space
  // so nesting depth is visible in the final string.
  const children = node.params
    .filter(Boolean)
    .map(child =>
      print(child)
        .split("\n")
        .map(line => " " + line)
        .join("\n")
    )
    .join("\n");

  return `<${node.Type} value="${getValue(node)}" >
${children}
</${node.Type}>`;
}

module.exports = print;
2 changes: 1 addition & 1 deletion packages/walt-compiler/.eslintrc
Expand Up @@ -200,7 +200,7 @@
"operator-assignment": 0, // require assignment operator shorthand where possible or prohibit it entirely (off by default)
"padded-blocks": 0, // enforce padding within blocks (off by default)
"quote-props": [ // require quotes around object literal property names (off by default)
2,
1,
"consistent-as-needed",
{ "keywords": false }
],
Expand Down
175 changes: 132 additions & 43 deletions packages/walt-compiler/dist/walt.js
Expand Up @@ -519,6 +519,7 @@ const precedence = {
"=>": PRECEDENCE_PARAMS,
"(": PRECEDENCE_PARAMS,
",": PRECEDENCE_COMMA,
as: PRECEDENCE_COMMA + 1,
">>": PRECEDENCE_SHIFT,
">>>": PRECEDENCE_SHIFT,
"<<": PRECEDENCE_SHIFT,
Expand Down Expand Up @@ -587,12 +588,37 @@ const maybeIdentifier = ctx => {
};

//
/**
* The expression parser for generating all parsed nodes, uses a modified Shunting
* Yard algo.
*
* @author Arthur Buldauksas <arthurbuldauskas@gmail.com>
*/
// PLEASE READ BEFORE EDITING:
//
// 100% of the program is statements which are made up of expressions. The code
// below is the "engine" to parsing just about everything(useful) in the syntax.
// Take great care editing it.
//
// * Avoid special cases as much as possible.
// * Leverage precedence and other Shunting Yard rules.
// * Simplify whenever possible, avoid adding code.
//
// Thanks.

// Peek at the final element of a stack/list (undefined when empty).
const last = list => list[list.length - 1];

// True for punctuator tokens that are not one of the closing brackets.
const isPunctuatorAndNotBracket = t =>
  t && t.type === Syntax.Punctuator && !["]", ")"].includes(t.value);

// Expressions can appear anywhere, often nested inside other expressions;
// that nesting is tracked as a depth counter. Reaching an "exit" token such
// as ")" or "}" that would push the depth below zero means we have escaped
// the expression we set out to parse, so we bail out.
const predicate = (token, depth) => depth > 0 && token.value !== ";";

// Keywords that, as an exception, are permitted inside expressions.
const validKeywordsInExpressions = ["as"];

// Shunting yard
const expression = (ctx, check = predicate) => {
const operators = [];
Expand All @@ -607,20 +633,32 @@ const expression = (ctx, check = predicate) => {
const consume = () => operands.push(operator(ctx, operators, operands));

const eatUntil = condition => {
let prev = last(operators);
while (prev && prev.value !== condition) {
let previous = last(operators);
while (previous && previous.value !== condition) {
consume();
prev = last(operators);
previous = last(operators);
}
};

// The rules for consuming punctuators(+ - , etc.)
const flushOperators = precedence => {
let previous = null;
while ((previous = last(operators)) && previous.Type !== Syntax.Sequence && getPrecedence(previous) >= precedence && getAssociativty(previous) === "left") {
let previous = last(operators);
while (previous &&
// A sequence is a special case. Note that this is a check for a Sequence NODE.
// This is so that math operators don't "eat" already parsed sequences of nodes.
// To put it plainly a comma separated list should never be added to a number.
// Examples include code like: 1, 2, 3, 2 + 2.
previous.Type !== Syntax.Sequence &&
// The rest of this is Shunting Yard rules
getPrecedence(previous) >= precedence && getAssociativty(previous) === "left") {
consume();
previous = last(operators);
}
};

// Process individual punctuators, below are the rules for handling things like
// brackets and code blocks. Other punctuators follow a precedence rule parsing
// approach.
const processPunctuator = () => {
switch (ctx.token.value) {
case "=>":
Expand Down Expand Up @@ -688,6 +726,8 @@ const expression = (ctx, check = predicate) => {
}
};

// Process individual tokens, this will either push to an operand stack or
// process an operator.
const process = () => {
switch (ctx.token.type) {
case Syntax.Constant:
Expand All @@ -704,7 +744,16 @@ const expression = (ctx, check = predicate) => {
case Syntax.Type:
operands.push(builtInType(ctx));
break;
case Syntax.Keyword:
case Syntax.Punctuator:
// Some special keywords may show up in expressions, but only a small
// subset. These keywords are treated as punctuators and processed by
// the overall punctuator rules
// EXAMPLE: the 'as' keyword - import statements consist of a sequence of
// expressions but the as keyword can be used to rename an import within.
if (ctx.token.type === Syntax.Keyword && !validKeywordsInExpressions.includes(ctx.token.value)) {
break;
}
const punctuatorResult = processPunctuator();
if (punctuatorResult != null) {
return punctuatorResult;
Expand All @@ -722,11 +771,13 @@ const expression = (ctx, check = predicate) => {
}
}

// If we get to the end of our available tokens then proceed to eat any left over
// operators and finalize the expression.
while (operators.length) {
consume();
}

// Should be a node
// Last operand should be a node that is at the "root" of this expression
return operands.pop();
};

Expand Down Expand Up @@ -1566,7 +1617,7 @@ const tokenParser = token(parse$1, Syntax.Identifier);
//
const supported$1 = [
// EcmaScript
"break", "if", "else", "import", "from", "export", "return", "switch", "case", "default", "const", "let", "for", "continue", "do", "while", "function",
"break", "if", "else", "import", "as", "from", "export", "return", "switch", "case", "default", "const", "let", "for", "continue", "do", "while", "function",

// s-expression
"global", "module", "type", "lambda",
Expand Down Expand Up @@ -2036,44 +2087,66 @@ const ALIAS = "alias";
// Statics

//
const mapImport = curry_1((options, node, _) => mapNode({
[Syntax.Pair]: (pairNode, __) => {
const { types, functions, globals } = options;
const [identifierNode, typeNode] = pairNode.params;

if (types[typeNode.value] != null) {
// create a new type

const functionIndex = Object.keys(functions).length;
const typeIndex = Object.keys(types).indexOf(typeNode.value);
const functionNode = _extends({}, identifierNode, {
id: identifierNode.value,
type: types[typeNode.value].type,
meta: {
[FUNCTION_INDEX]: functionIndex,
[TYPE_INDEX]: typeIndex,
FUNCTION_METADATA: types[typeNode.value].meta.FUNCTION_METADATA,
DEFAULT_ARGUMENTS: types[typeNode.value].meta.DEFAULT_ARGUMENTS
}
});
functions[identifierNode.value] = functionNode;
return _extends({}, pairNode, {
params: [functionNode, types[typeNode.value]]
});
}
const mapImport = curry_1((options, node, _) => {
return mapNode({
[Syntax.BinaryExpression]: (as, transform) => {
const [maybePair, asIdentifier] = as.params;
// if the original import is not typed this isn't a valid import and is ignored
if (maybePair.Type !== Syntax.Pair) {
// No transform happens here (the transform is what creates the global fn to reference)
return as;
}
// Continue transforming the import as before, the AS metadata will notify
// the generator to ask for the original import.
const [original, typeNode] = maybePair.params;

return transform(_extends({}, maybePair, {
params: [_extends({}, asIdentifier, {
meta: _extends({}, original.meta, {
// <new-value> AS <original-value>
AS: original.value
})
}), typeNode]
}));
},
[Syntax.Pair]: (pairNode, __) => {
const { types, functions, globals } = options;
const [identifierNode, typeNode] = pairNode.params;

if (types[typeNode.value] != null) {
// create a new type

const functionIndex = Object.keys(functions).length;
const typeIndex = Object.keys(types).indexOf(typeNode.value);
const functionNode = _extends({}, identifierNode, {
id: identifierNode.value,
type: types[typeNode.value].type,
meta: _extends({}, identifierNode.meta, {
[FUNCTION_INDEX]: functionIndex,
[TYPE_INDEX]: typeIndex,
FUNCTION_METADATA: types[typeNode.value].meta.FUNCTION_METADATA,
DEFAULT_ARGUMENTS: types[typeNode.value].meta.DEFAULT_ARGUMENTS
})
});
functions[identifierNode.value] = functionNode;
return _extends({}, pairNode, {
params: [functionNode, types[typeNode.value]]
});
}

if (!["Table", "Memory"].includes(typeNode.type)) {
const index = Object.keys(globals).length;
if (!["Table", "Memory"].includes(typeNode.type)) {
const index = Object.keys(globals).length;

globals[identifierNode.value] = _extends({}, identifierNode, {
meta: { [GLOBAL_INDEX]: index, [TYPE_CONST]: true },
type: typeNode.type
});
}
globals[identifierNode.value] = _extends({}, identifierNode, {
meta: { [GLOBAL_INDEX]: index, [TYPE_CONST]: true },
type: typeNode.type
});
}

return pairNode;
}
})(node));
return pairNode;
}
})(node);
});

//
const getTypeSize = typeString => {
Expand Down Expand Up @@ -3780,6 +3853,10 @@ function validate(ast, {
},
[Syntax.Import]: (importNode, _) => {
walker({
[Syntax.BinaryExpression]: (binary, __) => {
const [start, end] = binary.range;
problems.push(generateErrorString("Using an 'as' import without a type.", "A type for original import " + binary.params[0].value + " is not defined nor could it be inferred.", { start, end }, filename, GLOBAL_LABEL));
},
[Syntax.Identifier]: (identifier, __) => {
const [start, end] = identifier.range;
problems.push(generateErrorString("Infered type not supplied.", "Looks like you'd like to infer a type, but it was never provided by a linker. Non-concrete types cannot be compiled.", { start, end }, filename, GLOBAL_LABEL));
Expand Down Expand Up @@ -4627,6 +4704,15 @@ const getKindConstant = value => {
}
};

// Resolve the import field name for a node. An `import { original as local }`
// stores the original (exported) name in meta.AS, which takes precedence over
// the node's own local value. Removes the original's pointless mutable
// `let name` binding in favor of direct returns.
const getFieldName = node => {
  // `!= null` deliberately matches both null and undefined.
  if (node.meta.AS != null) {
    return node.meta.AS;
  }

  return node.value;
};

function generateImportFromNode(node) {
const [importsNode, moduleStringLiteralNode] = node.params;
const { value: module } = moduleStringLiteralNode;
Expand All @@ -4636,9 +4722,12 @@ function generateImportFromNode(node) {
walker({
[Syntax.Pair]: (pairNode, _) => {
const [fieldIdentifierNode, typeOrIdentifierNode] = pairNode.params;
const { value: field } = fieldIdentifierNode;

const field = getFieldName(fieldIdentifierNode);
const { value: importTypeValue } = typeOrIdentifierNode;

const kind = getKindConstant(importTypeValue);

const typeIndex = (() => {
const typeIndexMeta = typeOrIdentifierNode.meta[TYPE_INDEX];
if (typeIndexMeta) {
Expand Down

0 comments on commit 2d40f1d

Please sign in to comment.