Skip to content

Commit

Permalink
New async approach to loading
Browse files Browse the repository at this point in the history
This makes the loader a bit less aware of internal linter workings, and theoretically speeds things up a little bit.

Feedback from @maniator
  • Loading branch information
Phil Sturgeon authored and Phil Sturgeon committed Apr 17, 2018
1 parent 7e693ad commit 25864fe
Show file tree
Hide file tree
Showing 8 changed files with 319 additions and 213 deletions.
53 changes: 41 additions & 12 deletions lib/linter.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,9 @@

const should = require('should');

let activeRules = [];

// Build a RegExp that matches the given string literally: every regex
// metacharacter (and whitespace) is escaped before the pattern is compiled.
const regexFromString = regex =>
  new RegExp(regex.replace(/[\-\[\]{}()*+?.,\\\^$|#\s]/g, "\\$&"));

const ensureRule = (context, rule, shouldAssertion, results) => {
const ensureRule = (context, rule, shouldAssertion) => {
try {
shouldAssertion();
}
Expand All @@ -17,20 +15,47 @@ const ensureRule = (context, rule, shouldAssertion, results) => {
}

const pointer = (context && context.length > 0 ? context[context.length-1] : null);
const result = { pointer, rule, error };
results.push(result);
return { pointer, rule, error };
}
}

const setRules = rules => {
activeRules = rules;
let activeRules = {};

/** Drop every registered rule, returning the linter to a clean state. */
const initialize = () => {
  activeRules = {};
};

/** Register each rule in `rules` with the linter, one at a time. */
const createNewRules = rules => {
  for (const rule of rules) {
    createNewRule(rule);
  }
};

/**
 * Register a single rule with the linter, normalising shorthand fields so
 * the lint loop can assume arrays: `object`, `truthy`, and
 * `alphabetical.properties` may each be supplied as a single value.
 * Disabled rules are ignored. The caller's rule object is NOT mutated
 * (the previous version normalised it in place).
 */
const createNewRule = rule => {
  if (!rule.enabled) return;

  // Shallow-copy before normalising so the caller's object stays intact.
  const normalized = Object.assign({}, rule);
  if (!Array.isArray(normalized.object)) normalized.object = [normalized.object];
  if (normalized.alphabetical && normalized.alphabetical.properties && !Array.isArray(normalized.alphabetical.properties)) {
    // Copy the nested object too before rewriting its `properties` field.
    normalized.alphabetical = Object.assign({}, normalized.alphabetical, {
      properties: [normalized.alphabetical.properties],
    });
  }
  if (normalized.truthy && !Array.isArray(normalized.truthy)) normalized.truthy = [normalized.truthy];
  activeRules[normalized.name] = normalized;
}

/**
 * Return the active rules, minus any whose names appear in `skipRules`.
 * @param {string[]|*} skipRules - rule names to exclude; any non-array
 *   value (e.g. undefined) means "skip nothing".
 * @returns {object[]} the matching rule objects
 */
const relevantRules = skipRules => {
  const rules = Object.values(activeRules);

  if (!Array.isArray(skipRules)) return rules;

  return rules.filter(rule => !skipRules.includes(rule.name));
}

const lint = (objectName, object, options = {}) => {
const rules = activeRules;
const { skip } = options;

const rules = relevantRules(skip);
const results = [];

function ensure(rule, func) {
ensureRule(options.context, rule, func, options.lintResults);
const ensure = (rule, func) => {
const result = ensureRule(options.context, rule, func);
if (result) results.push(result);
}

for (const r in rules) {
Expand Down Expand Up @@ -156,9 +181,13 @@ const lint = (objectName, object, options = {}) => {
}
}
}

return results;
}

module.exports = {
setRules,
lint
createNewRule,
createNewRules,
initialize,
lint,
};
99 changes: 66 additions & 33 deletions lib/loader.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,18 @@
const fetch = require('node-fetch');
const fs = require('fs');
const path = require('path');
const linter = require('./linter.js');
const resolver = require('./resolver.js');
const yaml = require('js-yaml');

/**
 * Error base class that tags each instance with its concrete subclass
 * name, so logged errors read e.g. "OpenError: ..." rather than "Error: ...".
 */
class ExtendableError extends Error {
  constructor(message) {
    super(message);
    // TODO get rid of this, just instanceof things is fine
    this.name = this.constructor.name;
  }
}

// Thrown when an HTTP request fails outright or returns a non-404 error status.
class NetworkError extends ExtendableError {}
// Thrown when a resource cannot be found (e.g. HTTP 404).
class OpenError extends ExtendableError {}
// Thrown when a fetched document cannot be parsed (e.g. invalid JSON).
class ReadError extends ExtendableError {}

Expand All @@ -25,6 +27,28 @@ function readFileAsync(filename, encoding) {
});
}

/**
 * GET `url` and parse the response body as JSON.
 * @param {string} url - the address to fetch
 * @returns {Promise<object>} the parsed JSON body
 * @throws {NetworkError} when the request itself fails, or the server
 *   responds with a non-404 error status
 * @throws {OpenError} when the server responds 404
 * @throws {ReadError} when the body is not valid JSON
 */
const fetchUrl = async (url) => {
  // Wrap low-level transport failures (DNS, refused connection, ...).
  const response = await fetch(url).catch((err) => {
    throw new NetworkError(err);
  });

  if (!response.ok) {
    if (response.status === 404) throw new OpenError('Page not found: ' + url);
    throw new NetworkError('HTTP error: ' + response.status);
  }

  try {
    return await response.json();
  } catch (error) {
    throw new ReadError('Invalid JSON: ' + error.message);
  }
};


function readSpecFile(file, options) {
if (options.verbose) {
console.log('GET ' + file);
Expand All @@ -39,7 +63,7 @@ function readSpecFile(file, options) {
}
else {
// TODO error handlers?
return readFileAsync(file, 'utf8').then(data => data);
return readFileAsync(file, 'utf8');
}
}

Expand All @@ -61,47 +85,53 @@ const readOrError = async (file, options = {}) => {
}
}

/**
 * Load one rule file — a nickname resolved under ../rules/, or an http(s)
 * URL — register its rules with the linter, and follow its `require`
 * chain recursively.
 * @param {string} file - rule file nickname or URL
 * @param {string[]} loadedFiles - accumulator of files loaded so far (mutated)
 * @param {object} options - { verbose } controls request logging
 * @returns {Promise<string[]>} the accumulated list of loaded files
 */
const recursivelyLoadRuleFiles = async (file, loadedFiles, options) => {
  const { verbose } = options;

  let data;
  if (file && file.startsWith('http')) {
    if (verbose > 1) console.log('GET ' + file);
    data = await fetchUrl(file);
  }
  else {
    const ruleFile = path.join(__dirname, '../rules/' + file + '.json');
    if (verbose > 1) console.log('GET ' + ruleFile);
    data = yaml.safeLoad(fs.readFileSync(ruleFile, 'utf8'), { json: true });
  }

  loadedFiles.push(file);

  if (typeof data.require === 'string') {
    // The recursive call pushes into this same `loadedFiles` array, so its
    // return value must NOT be concatenated back on: doing so duplicated
    // every entry (previously masked by the Set de-dupe in loadRuleFiles).
    await recursivelyLoadRuleFiles(data.require, loadedFiles, options);
  }

  if (verbose > 1) {
    console.log(`Found ${data.rules.length} rules in ${file}: ${data.rules.map(x => x.name)}`);
  }

  // Tell the linter about these new rules
  linter.createNewRules(data.rules);

  return loadedFiles;
}

/**
 * Map an async callback over an array, running every call concurrently.
 * The callback receives (item, index, array), mirroring Array#map.
 * @param {Array} array - values to map over
 * @param {Function} callback - may return a value or a Promise
 * @returns {Promise<Array>} results in input order
 */
async function asyncMap(array, callback) {
  // Array#map builds exactly the promise array the old index loop built;
  // Promise.all preserves input order.
  return Promise.all(array.map(callback));
}

/**
 * Reset the linter, then load every requested rule file (falling back to
 * 'default' when none are given), following each file's `require` chain.
 * @param {string[]} loadFiles - rule file nicknames/URLs to load
 * @param {object} [options] - { verbose } passed through to the loaders
 * @returns {Promise<string[]>} unique list of every rule file loaded
 */
const loadRuleFiles = async (loadFiles, options = {}) => {
  const { verbose } = options;
  linter.initialize();
  const files = (loadFiles.length > 0 ? loadFiles : ['default']);
  // Top-level files are independent, so load them in parallel; each loader
  // returns the chain of files it pulled in via `require`.
  const result = await Promise.all(files.map(file => recursivelyLoadRuleFiles(file, [], { verbose })));
  // Flatten the per-file chains and de-duplicate, preserving first-seen order.
  return [...new Set([].concat(...result))];
}

const resolveContent = (openapi, options) => {
Expand Down Expand Up @@ -138,7 +168,10 @@ const loadSpec = async (source, options = {}) => {
}

module.exports = {
loadRules,
loadRuleFiles,
loadSpec,
readOrError,
NetworkError,
OpenError,
ReadError,
};
Loading

0 comments on commit 25864fe

Please sign in to comment.