diff --git a/.eslintignore b/.eslintignore new file mode 100644 index 00000000..801153be --- /dev/null +++ b/.eslintignore @@ -0,0 +1,2 @@ +test/precache-data/* +test/analyzer-data/* diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md new file mode 100644 index 00000000..f06c5f30 --- /dev/null +++ b/.github/ISSUE_TEMPLATE.md @@ -0,0 +1,29 @@ + + + + +### Description + + +### Versions & Environment + +- polymer-build: +- node: +- Operating System: + +#### Steps to Reproduce + +#### Expected Results + + +#### Actual Results + diff --git a/.gitignore b/.gitignore index 5148e527..de6e1e3b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,37 +1,6 @@ -# Logs -logs -*.log +.DS_Store +.vscode npm-debug.log* - -# Runtime data -pids -*.pid -*.seed - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (http://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -node_modules -jspm_packages - -# Optional npm cache directory -.npm - -# Optional REPL history -.node_repl_history +node_modules/ +lib/ +typings/ diff --git a/.npmignore b/.npmignore new file mode 100644 index 00000000..e69de29b diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..2999f627 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,12 @@ +language: node_js +node_js: + - "6" + - "5" + - "4" +sudo: false +before_script: + - npm install + - npm run init + - npm run build +script: + - npm run test diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..214fff6a --- /dev/null +++ b/LICENSE @@ -0,0 +1,5 @@ +Copyright (c) 2014 The Polymer Project Authors. All rights reserved. 
+This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt +The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt +Code distributed by Google as part of the polymer project is also +subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt diff --git a/appveyor.yml b/appveyor.yml new file mode 100644 index 00000000..dfe475bc --- /dev/null +++ b/appveyor.yml @@ -0,0 +1,21 @@ +# Test against this version of Node.js +environment: + nodejs_version: "4.4.4" + +# Install scripts. (runs after repo cloning) +install: + # Get the latest stable version of Node.js or io.js + - ps: Install-Product node $env:nodejs_version + # install modules + - npm install + +# Post-install test scripts. +test_script: + # Output useful info for debugging. + - node --version + - npm --version + # run tests + - npm run test + +# Don't actually build. +build: off diff --git a/custom_typings/fs-extra.d.ts b/custom_typings/fs-extra.d.ts new file mode 100644 index 00000000..0ca846fe --- /dev/null +++ b/custom_typings/fs-extra.d.ts @@ -0,0 +1,4 @@ +declare module 'fs-extra' { + export function copySync(source: string, dest: string); + export function readdirSync(path: string): string; +} diff --git a/custom_typings/hydrolysis.d.ts b/custom_typings/hydrolysis.d.ts new file mode 100644 index 00000000..c0f3479b --- /dev/null +++ b/custom_typings/hydrolysis.d.ts @@ -0,0 +1,127 @@ +declare module 'hydrolysis' { + import {Node} from 'dom5'; + interface Options { + filter?: (path: string) => boolean; + } + interface Element { + is: string; + contentHref: string; + desc?: string; + } + interface Behavior { + is: string; + contentHref: string; + desc?: string; + } + + /** + * The metadata for all features and elements defined in one document + */ + interface DocumentDescriptor { + /** + * The elements from the document. 
+ */ + // elements: ElementDescriptor[]; + + /** + * The features from the document + */ + // features: FeatureDescriptor[]; + + /** + * The behaviors from the document + */ + // behaviors: BehaviorDescriptor[]; + + href?: string; + + imports?: DocumentDescriptor[]; + + // parsedScript?: estree.Program; + + html?: { + script: Node[], + style: Node[], + ast: Node + }; + } + + /** + * The metadata of an entire HTML document, in promises. + */ + interface AnalyzedDocument { + /** + * The url of the document. + */ + href: string; + /** + * The parsed representation of the doc. Use the `ast` property to get + * the full `parse5` ast. + */ + // htmlLoaded: Promise; + + /** + * Resolves to the list of this Document's transitive import dependencies. + */ + depsLoaded: Promise; + + /** + * The direct dependencies of the document. + */ + depHrefs: string[]; + /** + * Resolves to the list of this Document's import dependencies + */ + metadataLoaded: Promise; + } + + export class Analyzer { + static analyze(path: string, options: Options): Promise; + + constructor(attachAST: boolean, loader: Loader); + + metadataTree(path: string): Promise; + annotate(): void; + elements: Element[]; + behaviors: Behavior[]; + html: {[path: string]: AnalyzedDocument}; + parsedDocuments: {[path: string]: Node}; + + load(href: string):Promise; + + _getDependencies( + href: string, + found?: {[url:string]: boolean}, + transitive?: boolean) + : Promise + } +// } +// declare module 'hydrolysis/loader/resolver' { + export class Deferred { + promise: Promise; + resolve: (val:(T|PromiseLike))=>void; + reject: (err:any)=>void; + } + + /** + * An object that knows how to resolve resources. + */ + export interface Resolver { + /** + * Attempt to resolve `deferred` with the contents the specified URL. Returns + * false if the Resolver is unable to resolve the URL. 
+ */ + accept(path:string, deferred:Deferred):boolean; + } + + class FSResolver implements Resolver { + constructor(options: any); + accept(path:string, deferred:Deferred):boolean; + } + + export class Loader { + resolvers: Resolver[]; + addResolver(resolver:Resolver): void; + request(uri:string): Promise; + } +} diff --git a/custom_typings/node.d.ts b/custom_typings/node.d.ts new file mode 100644 index 00000000..3e3e84dd --- /dev/null +++ b/custom_typings/node.d.ts @@ -0,0 +1,151 @@ +// copied from lib.es6.d.ts + +declare var console: Console; + +interface Console { + assert(test?: boolean, message?: string, ...optionalParams: any[]): void; + clear(): void; + count(countTitle?: string): void; + debug(message?: string, ...optionalParams: any[]): void; + dir(value?: any, ...optionalParams: any[]): void; + dirxml(value: any): void; + error(message?: any, ...optionalParams: any[]): void; + group(groupTitle?: string): void; + groupCollapsed(groupTitle?: string): void; + groupEnd(): void; + info(message?: any, ...optionalParams: any[]): void; + log(message?: any, ...optionalParams: any[]): void; + // msIsIndependentlyComposed(element: Element): boolean; + profile(reportName?: string): void; + profileEnd(): void; + // select(element: Element): void; + time(timerName?: string): void; + timeEnd(timerName?: string): void; + trace(message?: any, ...optionalParams: any[]): void; + warn(message?: any, ...optionalParams: any[]): void; +} + +declare var Console: { + prototype: Console; + new(): Console; +} + +declare module Intl { + interface CollatorOptions { + usage?: string; + localeMatcher?: string; + numeric?: boolean; + caseFirst?: string; + sensitivity?: string; + ignorePunctuation?: boolean; + } + + interface ResolvedCollatorOptions { + locale: string; + usage: string; + sensitivity: string; + ignorePunctuation: boolean; + collation: string; + caseFirst: string; + numeric: boolean; + } + + interface Collator { + compare(x: string, y: string): number; + resolvedOptions(): 
ResolvedCollatorOptions; + } + var Collator: { + new (locales?: string[], options?: CollatorOptions): Collator; + new (locale?: string, options?: CollatorOptions): Collator; + (locales?: string[], options?: CollatorOptions): Collator; + (locale?: string, options?: CollatorOptions): Collator; + supportedLocalesOf(locales: string[], options?: CollatorOptions): string[]; + supportedLocalesOf(locale: string, options?: CollatorOptions): string[]; + } + + interface NumberFormatOptions { + localeMatcher?: string; + style?: string; + currency?: string; + currencyDisplay?: string; + useGrouping?: boolean; + minimumIntegerDigits?: number; + minimumFractionDigits?: number; + maximumFractionDigits?: number; + minimumSignificantDigits?: number; + maximumSignificantDigits?: number; + } + + interface ResolvedNumberFormatOptions { + locale: string; + numberingSystem: string; + style: string; + currency?: string; + currencyDisplay?: string; + minimumIntegerDigits: number; + minimumFractionDigits: number; + maximumFractionDigits: number; + minimumSignificantDigits?: number; + maximumSignificantDigits?: number; + useGrouping: boolean; + } + + interface NumberFormat { + format(value: number): string; + resolvedOptions(): ResolvedNumberFormatOptions; + } + var NumberFormat: { + new (locales?: string[], options?: NumberFormatOptions): NumberFormat; + new (locale?: string, options?: NumberFormatOptions): NumberFormat; + (locales?: string[], options?: NumberFormatOptions): NumberFormat; + (locale?: string, options?: NumberFormatOptions): NumberFormat; + supportedLocalesOf(locales: string[], options?: NumberFormatOptions): string[]; + supportedLocalesOf(locale: string, options?: NumberFormatOptions): string[]; + } + + interface DateTimeFormatOptions { + localeMatcher?: string; + weekday?: string; + era?: string; + year?: string; + month?: string; + day?: string; + hour?: string; + minute?: string; + second?: string; + timeZoneName?: string; + formatMatcher?: string; + hour12?: boolean; + 
timeZone?: string; + } + + interface ResolvedDateTimeFormatOptions { + locale: string; + calendar: string; + numberingSystem: string; + timeZone: string; + hour12?: boolean; + weekday?: string; + era?: string; + year?: string; + month?: string; + day?: string; + hour?: string; + minute?: string; + second?: string; + timeZoneName?: string; + } + + interface DateTimeFormat { + format(date?: Date | number): string; + resolvedOptions(): ResolvedDateTimeFormatOptions; + } + var DateTimeFormat: { + new (locales?: string[], options?: DateTimeFormatOptions): DateTimeFormat; + new (locale?: string, options?: DateTimeFormatOptions): DateTimeFormat; + (locales?: string[], options?: DateTimeFormatOptions): DateTimeFormat; + (locale?: string, options?: DateTimeFormatOptions): DateTimeFormat; + supportedLocalesOf(locales: string[], options?: DateTimeFormatOptions): string[]; + supportedLocalesOf(locale: string, options?: DateTimeFormatOptions): string[]; + } +} diff --git a/custom_typings/plylog.d.ts b/custom_typings/plylog.d.ts new file mode 100644 index 00000000..f57bf11a --- /dev/null +++ b/custom_typings/plylog.d.ts @@ -0,0 +1,15 @@ +declare module 'plylog' { + + class PolymerLogger { + constructor(options: any) + setLevel(newLevel: string) + error(message: string, metadata?: any) + warn(message: string, metadata?: any) + info(message: string, metadata?: any) + debug(message: string, metadata?: any) + } + + export function setVerbose(); + export function setQuiet(); + export function getLogger(name: string): PolymerLogger; +} diff --git a/custom_typings/slash.d.ts b/custom_typings/slash.d.ts new file mode 100644 index 00000000..c5f720ca --- /dev/null +++ b/custom_typings/slash.d.ts @@ -0,0 +1,3 @@ +declare module 'slash' { + export function slash(source: string): string; +} diff --git a/custom_typings/vinyl-fs.d.ts b/custom_typings/vinyl-fs.d.ts new file mode 100644 index 00000000..6d394967 --- /dev/null +++ b/custom_typings/vinyl-fs.d.ts @@ -0,0 +1,15 @@ +/** + * @license + 
* Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +declare module "vinyl-fs" { + interface ISrcOptions { + allowEmpty?: boolean; + } +} diff --git a/gulpfile.js b/gulpfile.js new file mode 100644 index 00000000..fd008fc1 --- /dev/null +++ b/gulpfile.js @@ -0,0 +1,78 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; + +const depcheck = require('depcheck'); +const eslint = require('gulp-eslint'); +const fs = require('fs-extra'); +const gulp = require('gulp'); +const mergeStream = require('merge-stream'); +const mocha = require('gulp-mocha'); +const path = require('path'); +const runSeq = require('run-sequence'); +const tslint = require("gulp-tslint"); +const typescript = require('gulp-typescript'); +const typings = require('gulp-typings'); + +const tsProject = typescript.createProject('tsconfig.json'); + +gulp.task('init', () => gulp.src("./typings.json").pipe(typings())); + +gulp.task('lint', ['tslint', 'eslint', 'depcheck']); + +gulp.task('build', () => + mergeStream( + 
gulp.src('src/**/*.ts').pipe(typescript(tsProject)), + gulp.src(['src/**/*', '!src/**/*.ts']) + ).pipe(gulp.dest('lib')) +); + +gulp.task('clean', (done) => { + fs.remove(path.join(__dirname, 'lib'), done); +}); + +gulp.task('build-all', (done) => { + runSeq('clean', 'init', 'lint', 'build', done); +}); + +gulp.task('test', ['build'], () => + gulp.src('test/**/*_test.js', {read: false}) + .pipe(mocha({ + ui: 'tdd', + reporter: 'spec', + })) +); + +gulp.task('tslint', () => + gulp.src('src/**/*.ts') + .pipe(tslint({ + configuration: 'tslint.json', + })) + .pipe(tslint.report('verbose'))); + +gulp.task('eslint', () => + gulp.src('test/**/*.js') + .pipe(eslint()) + .pipe(eslint.format()) + .pipe(eslint.failAfterError())); + +gulp.task('depcheck', () => + depcheck(__dirname, {}) + .then((result) => { + let invalidFiles = Object.keys(result.invalidFiles) || []; + let invalidJsFiles = invalidFiles.filter((f) => f.endsWith('.js')); + if (invalidJsFiles.length > 0) { + throw new Error(`Invalid files: ${invalidJsFiles}`); + } + if (result.dependencies.length) { + throw new Error(`Unused dependencies: ${result.dependencies}`); + } + })); diff --git a/package.json b/package.json new file mode 100644 index 00000000..9a15797e --- /dev/null +++ b/package.json @@ -0,0 +1,50 @@ +{ + "name": "polymer-build", + "version": "0.1.0", + "description": "A library of Gulp build tasks", + "main": "lib/polymer-build.js", + "scripts": { + "test": "gulp test" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Polymer/polymer-build.git" + }, + "author": "The Polymer Project Authors", + "license": "BSD-3-Clause", + "bugs": { + "url": "https://github.com/Polymer/polymer-build/issues" + }, + "homepage": "https://github.com/Polymer/polymer-build#readme", + "dependencies": { + "css-slam": "^1.1.0", + "dom5": "^1.3.1", + "fs-extra": "^0.30.0", + "gulp": "^3.9.1", + "gulp-html-minifier": "^0.1.8", + "gulp-if": "^2.0.0", + "hydrolysis": "^1.23.3", + "merge-stream": "^1.0.0", + 
"minimatch-all": "^1.0.2", + "multipipe": "^0.3.0", + "plylog": "^0.4.0", + "sw-precache": "^3.1.1", + "through2": "^2.0.1", + "uglify-js": "^2.6.2", + "vinyl": "^1.1.1", + "vinyl-fs": "^2.4.3", + "vulcanize": "^1.14.8" + }, + "devDependencies": { + "chai": "^3.5.0", + "depcheck": "^0.6.3", + "gulp-eslint": "^2.0.0", + "gulp-mocha": "^2.2.0", + "gulp-tslint": "^5.0.0", + "gulp-typescript": "^2.13.4", + "gulp-typings": "^1.3.6", + "run-sequence": "^1.2.0", + "tslint": "^3.10.2", + "vinyl-fs-fake": "^1.1.0" + } +} diff --git a/src/analyzer.ts b/src/analyzer.ts new file mode 100644 index 00000000..b79ebcac --- /dev/null +++ b/src/analyzer.ts @@ -0,0 +1,285 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import * as fs from 'fs'; +import {Analyzer, Deferred, Loader, Resolver, DocumentDescriptor} from 'hydrolysis'; +import {posix as posixPath} from 'path'; +import * as osPath from 'path'; +import {Transform} from 'stream'; +import File = require('vinyl'); +import {parse as parseUrl} from 'url'; +import * as logging from 'plylog'; +import {Node, queryAll, predicates, getAttribute} from 'dom5'; +import urlFromPath from './url-from-path'; + +const minimatchAll = require('minimatch-all'); +const logger = logging.getLogger('cli.build.analyzer'); + +export interface DocumentDeps { + imports?: Array; + scripts?: Array; + styles?: Array; +} + +export class StreamAnalyzer extends Transform { + + root: string; + entrypoint: string; + shell: string; + fragments: string[]; + allFragments: 
string[]; + + resolver: StreamResolver; + loader: Loader; + analyzer: Analyzer; + + files = new Map(); + + _analyzeResolve: (DepsIndex) => void; + analyze: Promise; + + constructor(root: string, entrypoint: string, shell: string, fragments: string[]) { + super({objectMode: true}); + this.root = root; + this.entrypoint = entrypoint; + this.shell = shell; + this.fragments = fragments; + + this.allFragments = []; + // It's important that shell is first for document-ordering of imports + if (shell) { + this.allFragments.push(shell); + } + + if (entrypoint && !shell && fragments.length === 0) { + this.allFragments.push(entrypoint); + } + + if (fragments) { + this.allFragments = this.allFragments.concat(fragments); + } + + this.resolver = new StreamResolver(this); + this.loader = new Loader(); + this.loader.addResolver(this.resolver); + this.analyzer = new Analyzer(false, this.loader); + this.analyze = new Promise((resolve, reject) => { + this._analyzeResolve = resolve; + }); + } + + _transform( + file: File, + encoding: string, + callback: (error?, data?: File) => void + ): void { + + this.addFile(file); + + // If this is the entrypoint, hold on to the file, so that it's fully + // analyzed by the time down-stream transforms see it. + if (this.isFragment(file)) { + callback(null, null); + } else { + callback(null, file); + } + } + + _flush(done: (error?) => void) { + this._getDepsToEntrypointIndex().then((depsIndex) => { + // push held back files + for (let fragment of this.allFragments) { + let url = urlFromPath(this.root, fragment); + let file = this.getUrl(url); + if (file == null) { + done(new Error(`no file found for fragment ${fragment}`)); + } + this.push(file); + } + this._analyzeResolve(depsIndex); + done(); + }); + } + + /** + * A side-channel to add files to the resolver that did not come throgh the + * stream transformation. This is for generated files, like + * shared-bundle.html. 
This should probably be refactored so that the files + * can be injected into the stream. + */ + addFile(file: File): void { + // Badly-behaved upstream transformers (looking at you gulp-html-minifier) + // may use posix path separators on Windows. + let filepath = osPath.normalize(file.path); + // Store only root-relative paths, in URL/posix format + this.files.set(urlFromPath(this.root, filepath), file); + } + + getFile(filepath: string): File { + return this.getUrl(urlFromPath(this.root, filepath)); + } + + getUrl(url: string): File { + if (url.startsWith('/')) { + url = url.substring(1); + } + let file = this.files.get(url); + if (!file) { + logger.debug(`no file for ${url} :(`); + } + return file; + } + + isFragment(file): boolean { + return this.allFragments.indexOf(file.path) !== -1; + } + + _getDepsToEntrypointIndex(): Promise { + let depsPromises = []>this.allFragments.map((f) => + this._getDependencies(urlFromPath(this.root, f))); + + return Promise.all(depsPromises).then((value: any) => { + // tsc was giving a spurious error with `allDeps` as the parameter + let allDeps: DocumentDeps[] = value; + + // An index of dependency -> fragments that depend on it + let depsToFragments = new Map(); + + // An index of fragments -> dependencies + let fragmentToDeps = new Map(); + + let fragmentToFullDeps = new Map(); + + console.assert(this.allFragments.length === allDeps.length); + + for (let i = 0; i < allDeps.length; i++) { + let fragment = this.allFragments[i]; + let deps: DocumentDeps = allDeps[i]; + console.assert(deps != null, `deps is null for ${fragment}`); + + fragmentToDeps.set(fragment, deps.imports); + fragmentToFullDeps.set(fragment, deps); + + for (let dep of deps.imports) { + let entrypointList; + if (!depsToFragments.has(dep)) { + entrypointList = []; + depsToFragments.set(dep, entrypointList); + } else { + entrypointList = depsToFragments.get(dep); + } + entrypointList.push(fragment); + } + } + return { + depsToFragments, + fragmentToDeps, + 
fragmentToFullDeps, + }; + }); + } + /** + * Attempts to retreive document-order transitive dependencies for `url`. + */ + _getDependencies(url: string): Promise { + let dir = posixPath.dirname(url); + return this.analyzer.metadataTree(url) + .then((tree) => this._getDependenciesFromDescriptor(tree, dir)); + } + + _getDependenciesFromDescriptor(descriptor: DocumentDescriptor, dir: string): DocumentDeps { + let allHtmlDeps = []; + let allScriptDeps = new Set(); + let allStyleDeps = new Set(); + + let deps: DocumentDeps = this._collectScriptsAndStyles(descriptor); + deps.scripts.forEach((s) => allScriptDeps.add(posixPath.resolve(dir, s))); + deps.styles.forEach((s) => allStyleDeps.add(posixPath.resolve(dir, s))); + if (descriptor.imports) { + let queue = descriptor.imports.slice(); + while (queue.length > 0) { + let next = queue.shift(); + if (!next.href) { + continue; + } + allHtmlDeps.push(next.href); + let childDeps = this._getDependenciesFromDescriptor(next, posixPath.dirname(next.href)); + allHtmlDeps = allHtmlDeps.concat(childDeps.imports); + childDeps.scripts.forEach((s) => allScriptDeps.add(s)); + childDeps.styles.forEach((s) => allStyleDeps.add(s)); + } + } + + return { + scripts: Array.from(allScriptDeps), + styles: Array.from(allStyleDeps), + imports: allHtmlDeps, + }; + } + + _collectScriptsAndStyles(tree: DocumentDescriptor): DocumentDeps { + let scripts = []; + let styles = []; + tree.html.script.forEach((script) => { + if (script['__hydrolysisInlined']) { + scripts.push(script['__hydrolysisInlined']); + } + }); + tree.html.style.forEach((style) => { + let href = getAttribute(style, 'href'); + if (href) { + styles.push(href); + } + }); + return { + scripts, + styles + }; + } +} + +export interface DepsIndex { + depsToFragments: Map; + // TODO(garlicnation): Remove this map. + // A legacy map from framents to html dependencies. + fragmentToDeps: Map; + // A map from frament urls to html, js, and css dependencies. 
+ fragmentToFullDeps: Map; +} + +class StreamResolver implements Resolver { + analyzer: StreamAnalyzer; + + constructor(analyzer: StreamAnalyzer) { + this.analyzer = analyzer; + } + + accept(url: string, deferred: Deferred): boolean { + let urlObject = parseUrl(url); + + if (urlObject.hostname || !urlObject.pathname) { + return false; + } + + let urlPath = decodeURIComponent(urlObject.pathname); + let file = this.analyzer.getUrl(urlPath); + + if (file) { + deferred.resolve(file.contents.toString()); + } else { + logger.debug(`No file found for ${urlPath}`); + // If you're template to do the next line, Loader does that for us, so + // don't double reject! + // deferred.reject(new Error(`No file found for ${urlPath}`)); + return false; + } + return true; + } +} diff --git a/src/build.ts b/src/build.ts new file mode 100644 index 00000000..b83c4867 --- /dev/null +++ b/src/build.ts @@ -0,0 +1,221 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import clone = require('clone'); +import * as fs from 'fs'; +import * as gulp from 'gulp'; +import * as gulpif from 'gulp-if'; +import * as gutil from 'gulp-util'; +import mergeStream = require('merge-stream'); +import * as path from 'path'; +import {PassThrough, Readable} from 'stream'; +import File = require('vinyl'); +import * as vfs from 'vinyl-fs'; +import * as findup from 'findup'; +import * as logging from 'plylog'; + +import {StreamAnalyzer, DepsIndex} from './analyzer'; +import {Bundler} from './bundle'; +import {ProjectConfig} 
from '../project-config'; +import {HtmlProject} from './html-project'; +import {optimize, OptimizeOptions} from './optimize'; +import {PrefetchTransform} from './prefetch'; +import {waitForAll, compose, ForkedVinylStream} from './streams'; +import {generateServiceWorker, parsePreCacheConfig, SWConfig} from './sw-precache'; + +// non-ES compatible modules +const minimatchAll = require('minimatch-all'); +let logger = logging.getLogger('cli.build.build'); + +function getGulpfile(): any { + // TODO: Should we really be searching procses.cwd()? What about config.root? + let foundGulpfileDir; + try { + foundGulpfileDir = findup.sync(process.cwd(), 'gulpfile.js'); + } catch (e) { + logger.debug(`no gulpfile found (searched up from ${process.cwd()})`); + return null; + } + + let gulpfulePath = path.join(foundGulpfileDir, 'gulpfile.js'); + logger.debug('found gulpfile', { path: gulpfulePath }); + return require(gulpfulePath); +} + +export interface BuildOptions extends OptimizeOptions { + sources?: string[]; + dependencies?: string[]; + swPrecacheConfig?: string; + insertDependencyLinks?: boolean; +} + +export function build(options?: BuildOptions, config?: ProjectConfig): Promise { + return new Promise((buildResolve, _) => { + options = options || {}; + let root = config.root; + let entrypoint = config.entrypoint; + let shell = config.shell; + let fragments = config.fragments; + let swPrecacheConfig = path.resolve( + root, options.swPrecacheConfig || 'sw-precache-config.js'); + let sources = (options.sources || ['**/*']) + .map((p) => path.resolve(root, p)); + let dependencies = (options.dependencies || ['bower_components/**/*']) + .map((p) => path.resolve(root, p)); + let sourceExcludes = [ + '!build', + '!build/**/*', + '!bower_components', + '!bower_components/**/*', + '!node_modules', + '!node_modules/**/*', + ]; + + logger.debug(`shell: ${shell}`); + logger.debug(`entrypoint: ${entrypoint}`); + logger.debug(`dependencies: ${dependencies}`); + if 
(options.insertDependencyLinks) { + logger.debug(`Additional dependency links will be inserted into application`); + } + + let allSources = []; + allSources.push(entrypoint); + if (shell) allSources.push(shell); + allSources = allSources.concat(fragments, sources, sourceExcludes); + logger.debug(`sources: ${allSources}`); + + let allFragments = []; + if (shell) allFragments.push(shell); + allFragments = allFragments.concat(fragments); + logger.debug(`fragments: ${fragments}`); + + // TODO: let this be set by the user + let optimizeOptions: OptimizeOptions = { + html: { + removeComments: true, + }, + css: { + stripWhitespace: true + }, + js: { + minify: true + } + }; + + // mix in optimization options from build command + if (options.html) { + Object.assign(optimizeOptions.html, options.html); + } + if (options.css) { + Object.assign(optimizeOptions.css, options.css); + } + if (options.js) { + Object.assign(optimizeOptions.js, options.js); + } + + let gulpfile = getGulpfile(); + let userTransformers = gulpfile && gulpfile.transformers; + if (userTransformers) { + logger.debug(`${userTransformers.length} transformers found in gulpfile`); + } + + let sourcesProject = new HtmlProject(); + let depsProject = new HtmlProject(); + let analyzer = new StreamAnalyzer(root, entrypoint, shell, fragments); + let bundler = new Bundler(root, entrypoint, shell, fragments, analyzer); + + logger.info(`Building application...`); + logger.debug(`Reading source files...`); + let sourcesStream = + vfs.src(allSources, {cwdbase: true, allowEmpty: true}) + .pipe(sourcesProject.split) + .pipe(compose(userTransformers)) + .pipe(optimize(optimizeOptions)) + .pipe(sourcesProject.rejoin); + + logger.debug(`Reading dependencies...`); + let depsStream = + vfs.src(dependencies, {cwdbase: true, allowEmpty: true}) + .pipe(depsProject.split) + .pipe(optimize(optimizeOptions)) + .pipe(depsProject.rejoin); + + let allFiles = mergeStream(sourcesStream, depsStream) + .once('data', () => { 
logger.debug('Analyzing build dependencies...'); }) + .pipe(analyzer); + + let serviceWorkerName = 'service-worker.js'; + + let unbundledPhase = new ForkedVinylStream(allFiles) + .once('data', () => { logger.info('Generating build/unbundled...'); }) + .pipe( + gulpif( + options.insertDependencyLinks, + new PrefetchTransform(root, entrypoint, + shell, fragments, analyzer) + ) + ) + .pipe(vfs.dest('build/unbundled')); + + let bundledPhase = new ForkedVinylStream(allFiles) + .once('data', () => { logger.info('Generating build/bundled...'); }) + .pipe(bundler) + .pipe(vfs.dest('build/bundled')); + + let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig, scriptAndStyleDeps?: string[]) => { + logger.debug(`Generating service worker for ${buildRoot}...`); + logger.debug(`Script and style deps: ${scriptAndStyleDeps}`); + return generateServiceWorker({ + root, + entrypoint, + deps, + scriptAndStyleDeps, + buildRoot, + swConfig: clone(swConfig), + serviceWorkerPath: path.join(root, buildRoot, serviceWorkerName) + }); + }; + + waitForAll([unbundledPhase, bundledPhase]) + .then(() => analyzer.analyze) + .then((depsIndex) => { + let unbundledDeps = analyzer.allFragments + .concat(Array.from(depsIndex.depsToFragments.keys())); + + let fullDeps = Array.from(depsIndex.fragmentToFullDeps.values()); + let scriptAndStyleDeps = new Set(); + fullDeps.forEach(d => { + d.scripts.forEach((s) => scriptAndStyleDeps.add(s)); + d.styles.forEach((s) => scriptAndStyleDeps.add(s)); + }); + + let bundledDeps = analyzer.allFragments + .concat(bundler.sharedBundleUrl); + + parsePreCacheConfig(swPrecacheConfig).then((swConfig) => { + if (swConfig) { + logger.debug(`Service worker config found`, swConfig); + } else { + logger.debug(`No service worker configuration found at ${swPrecacheConfig}, continuing with defaults`); + } + + logger.info(`Generating service workers...`); + return Promise.all([ + genSW('build/unbundled', unbundledDeps, swConfig, Array.from(scriptAndStyleDeps)), + 
genSW('build/bundled', bundledDeps, swConfig) + ]); + }) + .then(() => { + logger.info('Build complete!'); + buildResolve(); + }); + }); + }); +} diff --git a/src/bundle.ts b/src/bundle.ts new file mode 100644 index 00000000..4a27c45f --- /dev/null +++ b/src/bundle.ts @@ -0,0 +1,305 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import * as dom5 from 'dom5'; +import * as gulpif from 'gulp-if'; +import * as path from 'path'; +import {posix as posixPath} from 'path'; +import {Transform} from 'stream'; +import File = require('vinyl'); +import * as logging from 'plylog'; +import urlFromPath from './url-from-path'; +import {StreamAnalyzer, DepsIndex} from './analyzer'; +import {compose} from './streams'; + +// non-ES module +const minimatchAll = require('minimatch-all'); +const through = require('through2').obj; +const Vulcanize = require('vulcanize'); +let logger = logging.getLogger('cli.build.bundle'); + +export class Bundler extends Transform { + + entrypoint: string; + root: string; + shell: string; + fragments: string[]; + allFragments: string[]; + + sharedBundleUrl: string; + + analyzer: StreamAnalyzer; + sharedFile: File; + + constructor(root: string, entrypoint: string, + shell: string, fragments: string[], analyzer: StreamAnalyzer) { + super({objectMode: true}); + this.root = root; + this.entrypoint = entrypoint; + this.shell = shell; + this.fragments = fragments; + + this.allFragments = []; + // It's important that shell is first for document-ordering of imports + if 
(shell) { + this.allFragments.push(shell); + } + + if (entrypoint && !shell && fragments.length === 0) { + this.allFragments.push(entrypoint); + } + + if (fragments) { + this.allFragments = this.allFragments.concat(fragments); + } + + this.analyzer = analyzer; + this.sharedBundleUrl = 'shared-bundle.html'; + } + + _transform( + file: File, + encoding: string, + callback: (error?, data?: File) => void + ): void { + + // If this is the entrypoint, hold on to the file, so that it's fully + // analyzed by the time down-stream transforms see it. + if (this.isEntrypoint(file)) { + callback(null, null); + } else { + callback(null, file); + } + + } + + _flush(done: (error?) => void) { + this._buildBundles().then((bundles: Map) => { + for (let fragment of this.allFragments) { + let file = this.analyzer.getFile(fragment); + console.assert(file != null); + let contents = bundles.get(fragment); + file.contents = new Buffer(contents); + this.push(file); + } + let sharedBundle = bundles.get(this.sharedBundleUrl); + if (sharedBundle) { + let contents = bundles.get(this.sharedBundleUrl); + this.sharedFile.contents = new Buffer(contents); + this.push(this.sharedFile); + } + // end the stream + done(); + }); + } + + isEntrypoint(file): boolean { + return this.allFragments.indexOf(file.path) !== -1; + } + + _buildBundles(): Promise> { + return this._getBundles().then((bundles) => { + let sharedDepsBundle = (this.shell) + ? urlFromPath(this.root, this.shell) + : this.sharedBundleUrl; + let sharedDeps = bundles.get(sharedDepsBundle) || []; + let promises = []; + + if (this.shell) { + let shellFile = this.analyzer.getFile(this.shell); + console.assert(shellFile != null); + let newShellContent = this._addSharedImportsToShell(bundles); + shellFile.contents = new Buffer(newShellContent); + } + + for (let fragment of this.allFragments) { + let fragmentUrl = urlFromPath(this.root, fragment); + let addedImports = (fragment === this.shell && this.shell) + ? 
[] + : [posixPath.relative(posixPath.dirname(fragmentUrl), sharedDepsBundle)]; + let excludes = (fragment === this.shell && this.shell) + ? [] + : sharedDeps.concat(sharedDepsBundle); + + promises.push(new Promise((resolve, reject) => { + let vulcanize = new Vulcanize({ + abspath: null, + fsResolver: this.analyzer.resolver, + addedImports: addedImports, + stripExcludes: excludes, + inlineScripts: true, + inlineCss: true, + inputUrl: fragmentUrl, + }); + vulcanize.process(null, (err, doc) => { + if (err) { + reject(err); + } else { + resolve({ + url: fragment, + contents: doc, + }); + } + }); + })); + } + // vulcanize the shared bundle + if (!this.shell && sharedDeps && sharedDeps.length !== 0) { + logger.info(`generating shared bundle...`); + promises.push(this._generateSharedBundle(sharedDeps)); + } + return Promise.all(promises).then((bundles) => { + // convert {url,contents}[] into a Map + let contentsMap = new Map(); + for (let bundle of bundles) { + contentsMap.set(bundle.url, bundle.contents); + } + return contentsMap; + }); + }); + } + + _addSharedImportsToShell(bundles: Map): string { + console.assert(this.shell != null); + let shellUrl = urlFromPath(this.root, this.shell); + let shellUrlDir = posixPath.dirname(shellUrl); + let shellDeps = bundles.get(shellUrl) + .map((d) => posixPath.relative(shellUrlDir, d)); + logger.debug('found shell dependencies', { + shellUrl: shellUrl, + shellUrlDir: shellUrlDir, + shellDeps: shellDeps, + }); + + let file = this.analyzer.getFile(this.shell); + console.assert(file != null); + let contents = file.contents.toString(); + let doc = dom5.parse(contents); + let imports = dom5.queryAll(doc, dom5.predicates.AND( + dom5.predicates.hasTagName('link'), + dom5.predicates.hasAttrValue('rel', 'import') + )); + logger.debug('found html import elements', { + imports: imports.map((el) => dom5.getAttribute(el, 'href')), + }); + + // Remove all imports that are in the shared deps list so that we prefer + // the ordering or shared deps. 
Any imports left should be independent of + // ordering of shared deps. + let shellDepsSet = new Set(shellDeps); + for (let _import of imports) { + let importHref = dom5.getAttribute(_import, 'href'); + if (shellDepsSet.has(importHref)) { + logger.debug(`removing duplicate import element "${importHref}"...`); + dom5.remove(_import); + } + } + + // Append all shared imports to the end of + let head = dom5.query(doc, dom5.predicates.hasTagName('head')); + for (let dep of shellDeps) { + let newImport = dom5.constructors.element('link'); + dom5.setAttribute(newImport, 'rel', 'import'); + dom5.setAttribute(newImport, 'href', dep); + dom5.append(head, newImport); + } + let newContents = dom5.serialize(doc); + return newContents; + } + + _generateSharedBundle(sharedDeps: string[]): Promise { + return new Promise((resolve, reject) => { + let contents = sharedDeps + .map((d) => ``) + .join('\n'); + + let sharedFsPath = path.resolve(this.root, this.sharedBundleUrl); + this.sharedFile = new File({ + cwd: this.root, + base: this.root, + path: sharedFsPath, + contents: new Buffer(contents), + }); + + // make the shared bundle visible to vulcanize + this.analyzer.addFile(this.sharedFile); + + let vulcanize = new Vulcanize({ + abspath: null, + fsResolver: this.analyzer.resolver, + inlineScripts: true, + inlineCss: true, + inputUrl: this.sharedBundleUrl, + }); + vulcanize.process(null, (err, doc) => { + if (err) { + reject(err); + } else { + resolve({ + url: this.sharedBundleUrl, + contents: doc, + }); + } + }); + }); + } + + _getBundles() { + return this.analyzer.analyze.then((indexes) => { + let depsToEntrypoints = indexes.depsToFragments; + let fragmentToDeps = indexes.fragmentToDeps; + let bundles = new Map(); + + let addImport = (from: string, to: string) => { + let imports; + if (!bundles.has(from)) { + imports = []; + bundles.set(from, imports); + } else { + imports = bundles.get(from); + } + if (!imports.includes(to)) { + imports.push(to); + } + }; + + // We want to 
collect dependencies that appear in > 1 entrypoint, but + // we need to collect them in document order, so rather than iterate + // directly through each dependency in depsToEntrypoints, we iterate + // through fragments in fragmentToDeps, which has dependencies in + // order for each fragment. Then we iterate through dependencies for + // each fragment and look up how many fragments depend on it. + // This assumes an ordering between fragments, since they could have + // conflicting orders between their top level imports. The shell should + // always come first. + for (let fragment of fragmentToDeps.keys()) { + + let fragmentUrl = urlFromPath(this.root, fragment); + let dependencies = fragmentToDeps.get(fragment); + for (let dep of dependencies) { + let fragmentCount = depsToEntrypoints.get(dep).length; + if (fragmentCount > 1) { + if (this.shell) { + addImport(urlFromPath(this.root, this.shell), dep); + // addImport(entrypoint, this.shell); + } else { + addImport(this.sharedBundleUrl, dep); + addImport(fragmentUrl, this.sharedBundleUrl); + } + } else { + addImport(fragmentUrl, dep); + } + } + } + return bundles; + }); + } + +} diff --git a/src/html-project.ts b/src/html-project.ts new file mode 100644 index 00000000..323bb49b --- /dev/null +++ b/src/html-project.ts @@ -0,0 +1,242 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. 
+ * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import * as dom5 from 'dom5'; +import {posix as posixPath} from 'path'; +import * as osPath from 'path'; +import * as logging from 'plylog'; +import {Transform} from 'stream'; +import File = require('vinyl'); + +const logger = logging.getLogger('cli.build.html-project'); +const pred = dom5.predicates; + +const extensionsForType = { + 'text/ecmascript-6': 'js', + 'application/javascript': 'js', + 'text/javascript': 'js', + 'application/x-typescript': 'ts', + 'text/x-typescript': 'ts', +}; + +/** + * Splits and rejoins inline scripts and styles from HTML files. + * + * Use `HtmlProject.prototype.split` and `HtmlProject.prototype.rejoin` to + * surround processing steps that operate on the extracted resources. + * HtmlProject works well with gulp-if to process files based on filename. 
+ */ +export class HtmlProject { + + _splitFiles: Map = new Map(); + _parts: Map = new Map(); + split = new Splitter(this); + rejoin = new Rejoiner(this); + + isSplitFile(parentPath: string): boolean { + return this._splitFiles.has(parentPath); + } + + getSplitFile(parentPath: string): SplitFile { + let splitFile = this._splitFiles.get(parentPath); + if (!splitFile) { + splitFile = new SplitFile(parentPath); + this._splitFiles.set(parentPath, splitFile); + } + return splitFile; + } + + addSplitPath(parentPath: string, childPath: string): void { + let splitFile = this.getSplitFile(parentPath); + splitFile.addPartPath(childPath); + this._parts.set(childPath, splitFile); + } + + getParentFile(childPath: string): SplitFile { + return this._parts.get(childPath); + } + +} + + +/** + * Represents a file that is split into multiple files. + */ +class SplitFile { + path: string; + parts: Map = new Map(); + outstandingPartCount = 0; + vinylFile: File = null; + + constructor(path: string) { + this.path = path; + } + + addPartPath(path: string): void { + this.parts.set(path, null); + this.outstandingPartCount++; + } + + setPartContent(path: string, content: string): void { + console.assert(this.parts.get(path) === null); + console.assert(this.outstandingPartCount > 0); + this.parts.set(path, content); + this.outstandingPartCount--; + } + + get isComplete(): boolean { + return this.outstandingPartCount === 0 && this.vinylFile != null; + } +} + +/** + * Splits HTML files, extracting scripts and styles into separate `File`s. + */ +class Splitter extends Transform { + + static isInlineScript = pred.AND( + pred.hasTagName('script'), + pred.NOT(pred.hasAttr('src')) + ); + + _project: HtmlProject; + + constructor(project) { + super({objectMode: true}); + this._project = project; + } + + _transform(file: File, encoding: string, callback: (error?, data?) 
=> void): void { + let filePath = osPath.normalize(file.path); + if (file.contents && filePath.endsWith('.html')) { + try { + let contents = file.contents.toString(); + let doc = dom5.parse(contents); + let body = dom5.query(doc, pred.hasTagName('body')); + let head = dom5.query(doc, pred.hasTagName('head')); + let scriptTags = dom5.queryAll(doc, Splitter.isInlineScript); + let styleTags = dom5.queryAll(doc, pred.hasTagName('style')); + + let scripts = []; + let styles = []; + + for (let i = 0; i < scriptTags.length; i++) { + let scriptTag = scriptTags[i]; + let source = dom5.getTextContent(scriptTag); + let typeAtribute = dom5.getAttribute(scriptTag, 'type'); + let extension = typeAtribute && extensionsForType[typeAtribute] || 'js'; + let childFilename = `${osPath.basename(filePath)}_script_${i}.${extension}`; + let childPath = osPath.join(osPath.dirname(filePath), childFilename); + scriptTag.childNodes = []; + dom5.setAttribute(scriptTag, 'src', childFilename); + let scriptFile = new File({ + cwd: file.cwd, + base: file.base, + path: childPath, + contents: new Buffer(source), + }); + this._project.addSplitPath(filePath, childPath); + this.push(scriptFile); + } + + let splitContents = dom5.serialize(doc); + let newFile = new File({ + cwd: file.cwd, + base: file.base, + path: filePath, + contents: new Buffer(splitContents), + }); + callback(null, newFile); + } catch (e) { + callback(e, null); + } + } else { + callback(null, file); + } + } +} + +/** + * Joins HTML files split by `Splitter`. + */ +class Rejoiner extends Transform { + + static isExternalScript = pred.AND( + pred.hasTagName('script'), + pred.hasAttr('src') + ); + + _project: HtmlProject; + + constructor(project) { + super({objectMode: true}); + this._project = project; + } + + _transform(file: File, encoding: string, callback: (error?, data?) 
=> void): void { + let filePath = osPath.normalize(file.path); + if (this._project.isSplitFile(filePath)) { + // this is a parent file + let splitFile = this._project.getSplitFile(filePath); + splitFile.vinylFile = file; + if (splitFile.isComplete) { + callback(null, this._rejoin(splitFile)); + } else { + splitFile.vinylFile = file; + callback(); + } + } else { + let parentFile = this._project.getParentFile(filePath); + if (parentFile) { + // this is a child file + parentFile.setPartContent(filePath, file.contents.toString()); + if (parentFile.isComplete) { + callback(null, this._rejoin(parentFile)); + } else { + callback(); + } + } else { + callback(null, file); + } + } + } + + _rejoin(splitFile: SplitFile) { + let file = splitFile.vinylFile; + let filePath = osPath.normalize(file.path); + let contents = file.contents.toString(); + let doc = dom5.parse(contents); + let body = dom5.query(doc, pred.hasTagName('body')); + let head = dom5.query(doc, pred.hasTagName('head')); + let scriptTags = dom5.queryAll(doc, Rejoiner.isExternalScript); + let styleTags = dom5.queryAll(doc, pred.hasTagName('style')); + + for (let i = 0; i < scriptTags.length; i++) { + let scriptTag = scriptTags[i]; + let srcAttribute = dom5.getAttribute(scriptTag, 'src'); + let scriptPath = osPath.join(osPath.dirname(splitFile.path), srcAttribute); + if (splitFile.parts.has(scriptPath)) { + let scriptSource = splitFile.parts.get(scriptPath); + dom5.setTextContent(scriptTag, scriptSource); + dom5.removeAttribute(scriptTag, 'src'); + } + } + + let joinedContents = dom5.serialize(doc); + + return new File({ + cwd: file.cwd, + base: file.base, + path: filePath, + contents: new Buffer(joinedContents), + }); + + } +} diff --git a/src/optimize.ts b/src/optimize.ts new file mode 100644 index 00000000..2bc457d1 --- /dev/null +++ b/src/optimize.ts @@ -0,0 +1,60 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. 
+ * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import * as gulpif from 'gulp-if'; +import * as stream from 'stream'; + +import {UglifyTransform} from './uglify-transform'; +import {compose} from './streams'; + +// not ES compatible +const cssSlam = require('css-slam').gulp; +const htmlmin = require('gulp-html-minifier'); + +export interface OptimizeOptions { + /** + * Enable HTML minification + * + * Options passed to gulp-html-minifier + */ + html?: { + collapseWhitespace?: boolean; + removeComments?: boolean; + }; + /** + * Enable CSS minification + */ + css?: { + stripWhitespace?: boolean; + }; + /** + * Enable JS minification + */ + js?: { + minify?: boolean; + }; +} + +export function optimize(options?: OptimizeOptions) { + let transforms = []; + + if (options) { + if (options.js && options.js.minify) { + transforms.push(new UglifyTransform()); + } + if (options.css && options.css.stripWhitespace) { + transforms.push(cssSlam()); + } + if (options.html) { + transforms.push(gulpif(/\.html$/, htmlmin(options.html))); + } + } + return compose(transforms); +} diff --git a/src/polymer-build.ts b/src/polymer-build.ts new file mode 100644 index 00000000..e69de29b diff --git a/src/prefetch.ts b/src/prefetch.ts new file mode 100644 index 00000000..22fb352f --- /dev/null +++ b/src/prefetch.ts @@ -0,0 +1,154 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. 
+ * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import * as dom5 from 'dom5'; +import * as path from 'path'; +import * as logging from 'plylog'; +import {Transform} from 'stream'; +import File = require('vinyl'); + +import {StreamAnalyzer, DepsIndex} from './analyzer'; + +let logger = logging.getLogger('cli.build.prefech'); + +export class PrefetchTransform extends Transform { + root: string; + entrypoint: string; + shell: string; + fragments: string[]; + allFragments: string[]; + fileMap: Map; + analyzer: StreamAnalyzer; + + constructor( + /** + * Root of the dependencies. + * Will be stripped when making links + */ + root: string, + + /** + * The main HTML file. This will have link rel=prefetches added to it. + */ + entrypoint: string, + + /** + * The app shell. This will have link rel=imports added to it. + */ + shell: string, + + /** + * List of files that will have dependencies flattened with + * `` + */ + fragments: string[], + + /** + * The analyzer to retreive dependency information from. 
+ */ + analyzer: StreamAnalyzer + ) { + super({objectMode: true}); + this.root = root; + this.entrypoint = entrypoint; + this.shell = shell; + this.fragments = fragments; + // clone fragments + this.allFragments = Array.from(fragments); + if (shell) { + this.allFragments.push(shell); + } else { + this.allFragments.push(entrypoint); + } + this.analyzer = analyzer; + this.fileMap = new Map(); + } + + pullUpDeps( + file: File, + deps: string[], + type: 'import' | 'prefetch' + ) { + let contents = file.contents.toString(); + let ast = dom5.parse(contents); + let head = dom5.query(ast, dom5.predicates.hasTagName('head')); + for (let dep of deps) { + if (dep.startsWith(this.root)) { + dep = path.relative(file.dirname, dep); + } + // prefetched deps should be absolute, as they will be in the main file + if (type === 'prefetch') { + dep = path.join('/', dep); + } + let link = dom5.constructors.element('link'); + dom5.setAttribute(link, 'rel', type); + dom5.setAttribute(link, 'href', dep); + dom5.append(head, link); + } + contents = dom5.serialize(ast); + file.contents = new Buffer(contents); + } + + _transform(file: File, enc: string, callback: (err?, file?) => void) { + if (this.isImportantFile(file)) { + // hold on to the file for safe keeping + this.fileMap.set(file.path, file); + callback(null, null); + } else { + callback(null, file); + } + } + + isImportantFile(file) { + return file.path === this.entrypoint || + this.allFragments.indexOf(file.path) > -1; + } + + _flush(done: (err?) 
=> void) { + if (this.fileMap.size === 0) { + return done(); + } + this.analyzer.analyze.then((depsIndex: DepsIndex) => { + let fragmentToDeps = new Map(depsIndex.fragmentToDeps); + + if (this.entrypoint && this.shell) { + let file = this.fileMap.get(this.entrypoint); + // forward shell's dependencies to main to be prefetched + let deps = fragmentToDeps.get(this.shell); + if (deps) { + this.pullUpDeps(file, deps, 'prefetch'); + } + this.push(file); + this.fileMap.delete(this.entrypoint); + } + + for (let im of this.allFragments) { + let file = this.fileMap.get(im); + let deps = fragmentToDeps.get(im); + if (deps) { + this.pullUpDeps(file, deps, 'import'); + } + this.push(file); + this.fileMap.delete(im); + } + + for (let leftover of this.fileMap.keys()) { + logger.warn( + 'File was listed in fragments but not found in stream:', + leftover + ); + this.push(this.fileMap.get(leftover)); + this.fileMap.delete(leftover); + } + + done(); + }); + } +} diff --git a/src/streams.ts b/src/streams.ts new file mode 100644 index 00000000..dcd82270 --- /dev/null +++ b/src/streams.ts @@ -0,0 +1,68 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. 
+ * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import {PassThrough, Readable, Transform} from 'stream'; + +const multipipe = require('multipipe'); + +/** + * Waits for the given ReadableStream + */ +export function waitFor(stream: NodeJS.ReadableStream): Promise { + return new Promise((resolve, reject) => { + stream.on('end', resolve); + stream.on('error', reject); + }); +} + +/** + * Waits for all the given ReadableStreams + */ +export function waitForAll(streams: NodeJS.ReadableStream[]): Promise { + return Promise.all(streams.map((s) => waitFor(s))); +} + +/** + * Composes multiple streams (or Transforms) into one. + */ +export function compose(streams: NodeJS.ReadWriteStream[]) { + if (streams && streams.length > 0) { + return multipipe(streams); + } else { + return new PassThrough({objectMode: true}); + } +} + +/** + * Forks a stream of Vinyl files, cloning each file before emitting on the fork. 
+ */ +export class ForkedVinylStream extends Readable { + + input: NodeJS.ReadableStream; + + constructor(input: NodeJS.ReadableStream) { + super({objectMode: true}); + this.input = input; + input.on('data', (file) => { + this.push(file.clone({deep: true, contents: true})); + }); + input.on('end', () => { + this.push(null); + }); + input.on('error', (e) => { + this.emit('error', e); + }); + } + + _read(size: number) { + // apparently no-op is fine, but this method is required, + // see: https://nodejs.org/api/stream.html#stream_readable_read_size_1 + } +} diff --git a/src/sw-precache.ts b/src/sw-precache.ts new file mode 100644 index 00000000..f7fb6c0c --- /dev/null +++ b/src/sw-precache.ts @@ -0,0 +1,136 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import * as fs from 'fs'; +import * as path from 'path'; +import * as logging from 'plylog'; +import {PassThrough} from 'stream'; +import File = require('vinyl'); + +// non-ES compatible modules +const swPrecache = require('sw-precache'); +const Module = require('module'); +let logger = logging.getLogger('cli.build.sw-precache'); + +function writeSWPrecache(destinationPath: string, config: SWConfig): Promise { + config.logger = logger.debug; + return swPrecache.write(destinationPath, config); +} + +export interface SWConfig { + cacheId?: string; + directoryIndex?: string; + dynamicUrlToDependencies?: { + [property: string]: string[] + }; + handleFetch?: boolean; + ignoreUrlParametersMatching?: RegExp[]; + importScripts?: 
string[]; + logger?: Function; + maximumFileSizeToCacheInBytes?: number; + navigateFallback?: string; + navigateFallbackWhitelist?: RegExp[]; + replacePrefix?: string; + runtimeCaching?: { + urlPattern: RegExp; + handler: string; + options?: { + cache: { + maxEntries: number; + name: string; + }; + }; + }[]; + staticFileGlobs?: string[]; + stripPrefix?: string; + templateFilePath?: string; + verbose?: boolean; +} + +export function generateServiceWorker(options: GenerateServiceWorkerOptions) +: Promise { + logger.debug(`generateServiceWorker() options:`, options); + let swConfig = options.swConfig || {}; + // strip root prefix, so buildRoot prefix can be added safely + let scriptsAndImports = options.deps; + if (options.scriptAndStyleDeps) { + scriptsAndImports = scriptsAndImports.concat(options.scriptAndStyleDeps); + } + let deps = scriptsAndImports.map((p) => { + if (p.startsWith(options.root)) { + return p.substring(options.root.length); + } + return p; + }); + let mainHtml = options.entrypoint.substring(options.root.length); + let precacheFiles = new Set(swConfig.staticFileGlobs); + deps.forEach((p) => precacheFiles.add(p)); + precacheFiles.add(mainHtml); + + let precacheList = Array.from(precacheFiles); + precacheList = precacheList.map((p) => path.join(options.buildRoot, p)); + + // swPrecache will determine the right urls by stripping buildRoot + swConfig.stripPrefix = options.buildRoot; + // static files will be pre-cached + swConfig.staticFileGlobs = precacheList; + + logger.debug(`writing service worker to ${options.serviceWorkerPath}`, swConfig); + return writeSWPrecache(options.serviceWorkerPath, swConfig); +} + +export function parsePreCacheConfig(configFile: string): Promise { + return new Promise((resolve, reject) => { + fs.stat(configFile, (err) => { + let config: SWConfig; + // only log if the config file exists at all + if (!err) { + try { + config = require(configFile); + } catch (e) { + logger.warn(`${configFile} file was found but could not be 
loaded`, {err}); + } + } + resolve(config); + }); + }); +} + +export interface GenerateServiceWorkerOptions { + /** + * folder containing files to be served by the service worker. + */ + root: string; + /** + * Main file to serve from service worker. + */ + entrypoint: string; + /** + * Output folder for the service worker bundle + */ + buildRoot: string; + /** + * File path of the output service worker file. + */ + serviceWorkerPath: string; + /** + * List of files to be cached by the service worker, + * in addition to files found in `swConfig.staticFileGlobs` + */ + deps: string[]; + /** + * List of script and style dependencies. + */ + scriptAndStyleDeps: string[]; + /** + * Existing config to use as a base for the serivce worker generation. + */ + swConfig?: SWConfig; +} diff --git a/src/uglify-transform.ts b/src/uglify-transform.ts new file mode 100644 index 00000000..a58d6da8 --- /dev/null +++ b/src/uglify-transform.ts @@ -0,0 +1,38 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +import {Transform} from 'stream'; +import * as uglify from 'uglify-js'; +import * as logging from 'plylog'; +import File = require('vinyl'); + +let logger = logging.getLogger('cli.build.uglify'); +const UglifyOptions: uglify.MinifyOptions = { fromString: true }; + +export class UglifyTransform extends Transform { + + constructor() { + super({objectMode: true}); + } + + _transform(file: File, encoding: string, callback: (error?, data?) 
=> void): void { + if (file.contents && file.path.endsWith('.js')) { + try { + let contents = file.contents.toString(); + contents = uglify.minify(contents, UglifyOptions).code; + file.contents = new Buffer(contents); + } catch (err) { + logger.warn(`Unable to uglify file ${file.path}`); + logger.debug(err); + } + } + callback(null, file); + } +}; diff --git a/src/url-from-path.ts b/src/url-from-path.ts new file mode 100644 index 00000000..fdf1143f --- /dev/null +++ b/src/url-from-path.ts @@ -0,0 +1,57 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +/** + * CODE ADAPTED FROM THE "SLASH" LIBRARY BY SINDRE SORHUS + * https://github.com/sindresorhus/slash + * + * ORIGINAL LICENSE: + * The MIT License (MIT) + * + * Copyright (c) Sindre Sorhus (sindresorhus.com)* + * + * Permission is hereby granted, free of charge, to any person obtaining a copy* + * of this software and associated documentation files (the "Software"), to deal* + * in the Software without restriction, including without limitation the rights* + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell* + * copies of the Software, and to permit persons to whom the Software is* + * furnished to do so, subject to the following conditions:* + * + * The above copyright notice and this permission notice shall be included in* + * all copies or substantial portions of the Software.* + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR* + * IMPLIED, INCLUDING BUT NOT LIMITED 
TO THE WARRANTIES OF MERCHANTABILITY,* + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE* + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER* + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,* + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN* + * THE SOFTWARE. + */ + +import * as path from 'path'; + +export default function urlFromPath(root, filepath) { + console.log(root, filepath); + if (!filepath.startsWith(root)) { + throw new Error(`file path is not in root: ${filepath} (${root})`); + } + + // On windows systems, convert filesystem path to URL by replacing slashes + let isPlatformWin = /^win/.test(process.platform); + let isExtendedLengthPath = /^\\\\\?\\/.test(filepath); + let hasNonAscii = /[^\x00-\x80]+/.test(filepath); + if (isPlatformWin && !isExtendedLengthPath && !hasNonAscii) { + return path.win32.relative(root, filepath).replace(/\\/g, '/'); + } + + // Otherwise, just return the relative path between the two + return path.relative(root, filepath); +} \ No newline at end of file diff --git a/test/.eslintrc.json b/test/.eslintrc.json new file mode 100644 index 00000000..6f010965 --- /dev/null +++ b/test/.eslintrc.json @@ -0,0 +1,13 @@ +{ + "extends": "eslint:recommended", + "ecmaVersion": 6, + "env": { + "mocha": true, + "node": true, + "es6": true + }, + "rules": { + "comma-dangle": [2, "always-multiline"], + "no-unused-vars": [2, { "vars": "all", "args": "none" }] + } +} diff --git a/test/analyzer-data/a.html b/test/analyzer-data/a.html new file mode 100644 index 00000000..d75de395 --- /dev/null +++ b/test/analyzer-data/a.html @@ -0,0 +1,2 @@ + + diff --git a/test/analyzer-data/b.html b/test/analyzer-data/b.html new file mode 100644 index 00000000..d75de395 --- /dev/null +++ b/test/analyzer-data/b.html @@ -0,0 +1,2 @@ + + diff --git a/test/analyzer-data/entrypoint.html b/test/analyzer-data/entrypoint.html new file mode 100644 index 
/**
 * @license
 * Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
 * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
 * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
 * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
 * Code distributed by Google as part of the polymer project is also
 * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
 */

'use strict';

const assert = require('chai').assert;
const path = require('path');
const StreamAnalyzer = require('../lib/analyzer').StreamAnalyzer;
const vfs = require('vinyl-fs-fake');

suite('Analyzer', () => {

  suite('DepsIndex', () => {

    // Streams the test/analyzer-data fixtures through a StreamAnalyzer
    // configured with two fragments (a.html, b.html) that each import the
    // same two shared files; each fragment's dependency list must contain
    // each shared file exactly once, in document order.
    test('fragment to deps list has only uniques', (done) => {
      let root = path.resolve('test/analyzer-data');
      let analyzer = new StreamAnalyzer(root, null, null, [
        path.resolve(root, 'a.html'),
        path.resolve(root, 'b.html'),
      ]);
      vfs.src(path.join(root, '**'), {cwdbase: true})
        .pipe(analyzer)
        .on('finish', () => {
          analyzer.analyze.then((depsIndex) => {
            let ftd = depsIndex.fragmentToDeps;
            for (let frag of ftd.keys()) {
              assert.deepEqual(ftd.get(frag), ['shared-1.html', 'shared-2.html']);
            }
            done();
          }).catch((err) => done(err));
        });
    });

    // The dependency should be indexed under a single URL: a leading-slash
    // variant appearing alongside the bare one would indicate the analyzer
    // loaded the same file twice under two different paths.
    test("analyzing shell and entrypoint doesn't double load files", (done) => {
      let root = path.resolve('test/analyzer-data');
      let analyzer = new StreamAnalyzer(
          root,
          path.resolve(root, 'entrypoint.html'),
          path.resolve(root, 'shell.html'));
      vfs.src(root + '/**', {cwdbase: true})
        .pipe(analyzer)
        .on('finish', () => {
          analyzer.analyze.then((depsIndex) => {
            assert.isTrue(depsIndex.depsToFragments.has('shared-2.html'));
            assert.isFalse(depsIndex.depsToFragments.has('/shell.html'));
            assert.isFalse(depsIndex.depsToFragments.has('/shared-2.html'));
            done();
          }).catch((err) => done(err));
        });
    });

  });

});
+ * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; + +const assert = require('chai').assert; +const dom5 = require('dom5'); +const File = require('vinyl'); +const path = require('path'); +const stream = require('stream'); + +const analyzer = require('../lib/analyzer'); +const bundle = require('../lib/bundle'); + +const Bundler = bundle.Bundler; +const StreamAnalyzer = analyzer.StreamAnalyzer; + +const root = path.resolve('/root'); + +suite('Bundler', () => { + + let bundler; + let sourceStream; + let bundledStream; + let files; + + let setupTest = (options) => new Promise((resolve, reject) => { + let fragments = options.fragments + ? 
options.fragments.map((f) => path.resolve(root, f)) + : []; + let entrypoint = options.entrypoint + && path.resolve(root, options.entrypoint); + let shell = options.shell + && path.resolve(root, options.shell); + let analyzer = new StreamAnalyzer(root, entrypoint, shell, fragments); + bundler = new Bundler(root, entrypoint, shell, fragments, analyzer); + sourceStream = new stream.Readable({ + objectMode: true, + }); + bundledStream = sourceStream + .pipe(analyzer) + .pipe(bundler); + files = new Map(); + bundledStream.on('data', (file) => { + files.set(file.path, file); + }); + bundledStream.on('end', (data) => { + resolve(files); + }); + bundledStream.on('error', (err) => { + reject(err); + }); + pushFiles(options.files); + }); + + teardown(() => { + bundler = null; + sourceStream = null; + bundledStream = null; + files = null; + }); + + let pushFiles = (files) => { + files.forEach((f) => sourceStream.push(f)); + sourceStream.push(null); + }; + + let getFile = (filename) => { + // we're getting FS paths, so add root + let file = files.get(path.resolve(root, filename)); + return file && file.contents && file.contents.toString(); + } + + let hasMarker = (doc, id) => { + let marker = dom5.query(doc, + dom5.predicates.AND( + dom5.predicates.hasTagName('div'), + dom5.predicates.hasAttrValue('id', id) + )); + return marker != null; + }; + + let hasImport = (doc, url) => { + let link = dom5.query(doc, + dom5.predicates.AND( + dom5.predicates.hasTagName('link'), + dom5.predicates.hasAttrValue('rel', 'import'), + dom5.predicates.hasAttrValue('href', url) + )); + return link != null; + }; + + test('entrypoint only', () => setupTest({ + entrypoint: 'entrypointA.html', + files: [framework(), entrypointA()], + }).then((files) => { + let doc = dom5.parse(getFile('entrypointA.html')); + assert.isTrue(hasMarker(doc, 'framework')); + assert.isFalse(hasImport(doc, 'framework.html')); + // TODO(justinfagnani): check that shared-bundle.html doesn't exist + // it's in the analyzer's 
file map for some reason + })); + + test('two fragments', () => setupTest({ + fragments: ['shell.html', 'entrypointA.html'], + files: [framework(), shell(), entrypointA()], + }).then((files) => { + // shell doesn't import framework + let shellDoc = dom5.parse(getFile('shell.html')); + assert.isFalse(hasMarker(shellDoc, 'framework')); + assert.isFalse(hasImport(shellDoc, 'framework.html')); + + // entrypoint doesn't import framework + let entrypointDoc = dom5.parse(getFile('entrypointA.html')); + assert.isFalse(hasMarker(entrypointDoc, 'framework')); + assert.isFalse(hasImport(entrypointDoc, 'framework.html')); + + // shared-bundle bundles framework + let sharedDoc = dom5.parse(getFile('shared-bundle.html')); + assert.isTrue(hasMarker(sharedDoc, 'framework')); + assert.isFalse(hasImport(sharedDoc, 'framework.html')); + + // fragments import shared-bundle + assert.isTrue(hasImport(entrypointDoc, 'shared-bundle.html')); + assert.isTrue(hasImport(shellDoc, 'shared-bundle.html')); + })); + + test.skip('shell and entrypoint', () => setupTest({ + entrypoint: '/root/entrypointA.html', + shell: '/root/shell.html', + files: [framework(), shell(), entrypointA()], + }).then((files) => { + // shell bundles framework + let shellDoc = dom5.parse(getFile('shell.html')); + assert.isTrue(hasMarker(shellDoc, 'framework')); + assert.isFalse(hasImport(shellDoc, '/root/framework.html')); + + // entrypoint doesn't import framework + let entrypointDoc = dom5.parse(getFile('entrypointA.html')); + assert.isFalse(hasMarker(entrypointDoc, 'framework')); + assert.isFalse(hasImport(entrypointDoc, '/root/framework.html')); + + // entrypoint imports shell + assert.isTrue(hasImport(entrypointDoc, 'shell.html')); + + // No shared-bundle with a shell + assert.isNotOk(getFile('shared-bundle.html')); + })); + + test('shell and fragments with shared dependency', () => setupTest({ + shell: 'shell.html', + fragments: ['entrypointB.html', 'entrypointC.html'], + files: [framework(), shell(), 
entrypointB(), entrypointC(), commonDep()], + }).then((files) => { + // shell bundles framework + let shellDoc = dom5.parse(getFile('shell.html')); + assert.isTrue(hasMarker(shellDoc, 'framework')); + assert.isFalse(hasImport(shellDoc, 'framework.html')); + + // shell bundles commonDep + assert.isTrue(hasMarker(shellDoc, 'commonDep')); + assert.isFalse(hasImport(shellDoc, 'commonDep.html')); + + // entrypoint B doesn't import commonDep + let entrypointBDoc = dom5.parse(getFile('entrypointB.html')); + assert.isFalse(hasMarker(entrypointBDoc, 'commonDep')); + assert.isFalse(hasImport(entrypointBDoc, 'commonDep.html')); + + // entrypoint C doesn't import commonDep + let entrypointCDoc = dom5.parse(getFile('entrypointC.html')); + assert.isFalse(hasMarker(entrypointCDoc, 'commonDep')); + assert.isFalse(hasImport(entrypointCDoc, 'commonDep.html')); + + // entrypoints import shell + assert.isTrue(hasImport(entrypointBDoc, 'shell.html')); + assert.isTrue(hasImport(entrypointCDoc, 'shell.html')); + + // No shared-bundle with a shell + assert.isNotOk(getFile('shared-bundle.html')); + })); + + test.skip('entrypoint and fragments', () => setupTest({ + entrypoint: '/root/entrypointA.html', + fragments: ['/root/shell.html', '/root/entrypointB.html', '/root/entrypointC.html'], + files: [framework(), shell(), entrypointA(), entrypointB(), entrypointC(), commonDep()], + }).then((files) => { + // shared bundle was emitted + let bundle = getFile('shared-bundle.html'); + assert.ok(bundle); + let bundleDoc = dom5.parse(bundle); + + // shared-bundle bundles framework + assert.isTrue(hasMarker(bundleDoc, 'framework')); + assert.isFalse(hasImport(bundleDoc, '/root/framework.html')); + + // shared-bundle bundles commonDep + assert.isTrue(hasMarker(bundleDoc, 'commonDep')); + assert.isFalse(hasImport(bundleDoc, '/root/commonDep.html')); + + // entrypoint doesn't import framework + let entrypointDoc = dom5.parse(getFile('entrypointA.html')); + assert.isFalse(hasMarker(entrypointDoc, 
'framework')); + assert.isFalse(hasImport(entrypointDoc, '/root/framework.html')); + + // shell doesn't import framework + let shellDoc = dom5.parse(getFile('entrypointA.html')); + assert.isFalse(hasMarker(shellDoc, 'framework')); + assert.isFalse(hasImport(shellDoc, '/root/framework.html')); + + // entrypoint B doesn't import commonDep + let entrypointBDoc = dom5.parse(getFile('entrypointB.html')); + assert.isFalse(hasMarker(entrypointBDoc, 'commonDep')); + assert.isFalse(hasImport(entrypointBDoc, '/root/commonDep.html')); + + // entrypoint C doesn't import commonDep + let entrypointCDoc = dom5.parse(getFile('entrypointC.html')); + assert.isFalse(hasMarker(entrypointCDoc, 'commonDep')); + assert.isFalse(hasImport(entrypointCDoc, '/root/commonDep.html')); + + // entrypoint and fragments import shared-bundle + assert.isTrue(hasImport(entrypointDoc, 'shared-bundle.html')); + assert.isTrue(hasImport(entrypointBDoc, 'shared-bundle.html')); + assert.isTrue(hasImport(entrypointCDoc, 'shared-bundle.html')); + assert.isTrue(hasImport(shellDoc, 'shared-bundle.html')); + })); + +}); + +const F = (filename, contents) => new File({ + cwd: root, + base: root, + path: path.resolve(root, filename), + contents: new Buffer(contents), +}); + +const framework = () => F('framework.html', ` +
+`); + +const shell = () => F('shell.html', ` + +
+`); + +const entrypointA = () => F('entrypointA.html', ` + +
+`); + +const entrypointB = () => F('entrypointB.html', ` + +
+`); + +const entrypointC = () => F('entrypointC.html', ` + +
+`); + +const commonDep = () => F('commonDep.html', ` +
+`); diff --git a/test/html-project_test.js b/test/html-project_test.js new file mode 100644 index 00000000..a0761fea --- /dev/null +++ b/test/html-project_test.js @@ -0,0 +1,58 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; + +const assert = require('chai').assert; +const path = require('path'); +const stream = require('stream'); +const File = require('vinyl'); + +const HtmlProject = require('../lib/html-project').HtmlProject; + +suite('HtmlProject', () => { + + test('deals with bad paths', (done) => { + let project = new HtmlProject(); + let sourceStream = new stream.Readable({ + objectMode: true, + }); + let root = path.normalize('/foo'); + let filepath = path.join(root, '/bar/baz.html'); + let source = + ''; + let file = new File({ + cwd: root, + base: root, + path: filepath, + contents: new Buffer(source), + }); + + sourceStream + .pipe(project.split) + .on('data', (file) => { + // this is what gulp-html-minifier does... 
+ if (path.sep === '\\' && file.path.endsWith('.html')) { + file.path = file.path.replace('\\', '/'); + } + }) + .pipe(project.rejoin) + .on('data', (file) => { + let contents = file.contents.toString(); + assert.equal(contents, source); + }) + .on('finish', () => done()) + .on('error', (error) => done(error)); + + sourceStream.push(file); + sourceStream.push(null); + }); + +}); diff --git a/test/optimize_test.js b/test/optimize_test.js new file mode 100644 index 00000000..46d0af07 --- /dev/null +++ b/test/optimize_test.js @@ -0,0 +1,109 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; + +const assert = require('chai').assert; +const vfs = require('vinyl-fs-fake'); + +const HtmlProject = require('../lib/html-project').HtmlProject; +const optimize = require('../lib/optimize').optimize; + +suite('optimize()', () => { + + function testStream(stream, cb) { + stream.on('data', (data) => { + cb(null, data) + }); + stream.on('error', (err) => cb(err)); + } + + test('css', (done) => { + let stream = vfs.src([ + { + path: 'foo.css', + contents: '/* comment */ selector { property: value; }', + }, + ]); + let op = stream.pipe(optimize({css: {stripWhitespace: true}})); + assert.notEqual(stream, op, 'stream should be wrapped'); + testStream(op, (err, f) => { + if (err) { + return done(err); + } + assert.equal(f.contents.toString(), 'selector{property:value;}'); + done(); + }); + }); + + test('js', (done) => { + let stream = vfs.src([ + { + path: 'foo.js', + contents: 'var 
foo = 3', + }, + ]); + let op = stream.pipe(optimize({js: {minify: true}})); + assert.notEqual(stream, op); + testStream(op, (err, f) => { + if (err) { + return done(err); + } + assert.equal(f.contents.toString(), 'var foo=3;'); + done(); + }); + }); + + test('all together', (done) => { + let expected = + `bar`; + let stream = vfs.src([ + { + path: 'foo.html', + contents: ` + + + + + bar + + `, + }, + ], {cwdbase: true}); + let options = { + html: { + collapseWhitespace: true, + removeComments: true, + }, + css: { + stripWhitespace: true, + }, + js: { + minify: true, + }, + }; + let project = new HtmlProject(); + let op = stream.pipe(project.split); + op = op.pipe(optimize(options)).pipe(project.rejoin); + testStream(op, (err, f) => { + if (err) { + return done(err); + } + assert.equal(f.contents.toString(), expected); + done(); + }); + }); +}); diff --git a/test/precache-data/config.js b/test/precache-data/config.js new file mode 100644 index 00000000..32e5b16e --- /dev/null +++ b/test/precache-data/config.js @@ -0,0 +1,3 @@ +module.exports = { + staticFileGlobs: ['*'], +} diff --git a/test/precache-data/static/fizz.html b/test/precache-data/static/fizz.html new file mode 100644 index 00000000..f540bc68 --- /dev/null +++ b/test/precache-data/static/fizz.html @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/test/precache-data/static/foo.js b/test/precache-data/static/foo.js new file mode 100644 index 00000000..48684502 --- /dev/null +++ b/test/precache-data/static/foo.js @@ -0,0 +1 @@ +var x = 3; diff --git a/test/precache_test.js b/test/precache_test.js new file mode 100644 index 00000000..89ba6d49 --- /dev/null +++ b/test/precache_test.js @@ -0,0 +1,87 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. 
+ * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; + +const assert = require('chai').assert; +const fs = require('fs'); +const path = require('path'); +const temp = require('temp').track(); +const vfs = require('vinyl-fs-fake'); + +const precache = require('../lib/sw-precache'); + +suite('sw-precache', () => { + const configFile = path.resolve(__dirname, 'precache-data', 'config.js'); + suite('parsing', () => { + test('js file', (done) => { + precache.parsePreCacheConfig(configFile).then((config) => { + assert.ok(config); + assert.property(config, 'staticFileGlobs'); + done(); + }) + }); + }); + + suite('generation', () => { + let buildRoot; + setup((done) => { + temp.mkdir('polymer-cli', (err, dir) => { + if (err) { + return done(err); + } + buildRoot = dir; + vfs.src(path.join(__dirname, 'precache-data/static/*')) + .pipe(vfs.dest(dir)) + .on('finish', () => done()); + } + ); + }); + + teardown((done) => { + temp.cleanup(done) + }); + + test('without config', (done) => { + precache.parsePreCacheConfig(path.join(__dirname, 'nope')).then(() => { + precache.generateServiceWorker({ + root: path.resolve(__dirname, 'precache-data/static'), + entrypoint: path.resolve(__dirname, 'precache-data/static/fizz.html'), + buildRoot, + deps: [], + serviceWorkerPath: path.join(buildRoot, 'service-worker.js'), + }).then(() => { + let content = + fs.readFileSync(path.join(buildRoot, 'service-worker.js'), 'utf-8'); + assert.include(content, '/fizz.html', 'entrypoint file should be present'); + done(); + }); + }).catch((err) => done(err)); + }); + + test('with config', 
(done) => { + precache.parsePreCacheConfig(configFile).then((config) => { + return precache.generateServiceWorker({ + root: path.resolve(__dirname, 'precache-data/static'), + entrypoint: path.resolve(__dirname, 'precache-data/static/fizz.html'), + buildRoot, + deps: [], + swConfig: config, + serviceWorkerPath: path.join(buildRoot, 'service-worker.js'), + }); + }).then(() => { + let content = fs.readFileSync(path.join(buildRoot, 'service-worker.js'), 'utf-8'); + assert.include(content, '/fizz.html', 'entrypoint file should be present'); + assert.include(content, '/foo.js', 'staticFileGlobs should match foo.js'); + done(); + }); + }); + }) +}); diff --git a/test/uglify-transform_test.js b/test/uglify-transform_test.js new file mode 100644 index 00000000..e0db9866 --- /dev/null +++ b/test/uglify-transform_test.js @@ -0,0 +1,67 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; +const assert = require('chai').assert; +const UglifyTransform = require('../lib/uglify-transform').UglifyTransform; +const File = require('vinyl'); + +suite('Uglify Transform', () => { + let ut; + setup(() => { + ut = new UglifyTransform(); + }) + test('always pass through files, even non-js', (done) => { + let f1 = new File({ + cwd: '/foo/bar', + base: '/foo/bar', + path: '/foo/bar/baz.zizz', + contents: null, + }) + ut._transform(f1, 'utf8', (err, data) => { + if (err) { + return done(err) + } + assert.equal(data, f1) + done(); + }); + }); + + test('minify js files', (done) => { + let b = 
new Buffer('var foo = 3'); + let f1 = new File({ + path: '/foo/bar/baz.js', + contents: b, + }); + ut._transform(f1, 'utf8', (err, data) => { + if (err) { + return done(err); + } + // buffer should be different if uglify processed f1 correctly + assert.notEqual(data.contents.toString(), b.toString()); + done(); + }); + }); + + test('continue on JS errors', (done) => { + let b = new Buffer('####'); + let f1 = new File({ + path: '/foo.js', + contents: b, + }); + ut._transform(f1, 'utf8', (err, data) => { + if (err) { + done(err); + } + assert.equal(b, data.contents); + done(); + }); + }); +}); diff --git a/test/url-from-path_test.js b/test/url-from-path_test.js new file mode 100644 index 00000000..acfc223c --- /dev/null +++ b/test/url-from-path_test.js @@ -0,0 +1,52 @@ +/** + * @license + * Copyright (c) 2016 The Polymer Project Authors. All rights reserved. + * This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt + * The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt + * The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt + * Code distributed by Google as part of the polymer project is also + * subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt + */ + +'use strict'; + +const assert = require('chai').assert; +const urlFromPath = require('../lib/url-from-path').default; + +const WIN_ROOT_PATH = 'C:\\Users\\TEST_USER\\TEST_ROOT'; +const MAC_ROOT_PATH = '/Users/TEST_USER/TEST_ROOT'; +var isPlatformWin = /^win/.test(process.platform); + +suite('urlFromPath()', () => { + + test('throws error when path is not in root', () => { + assert.throws(function() { + urlFromPath(MAC_ROOT_PATH, '/some/other/path/shop-app.html'); + }); + }); + + if (isPlatformWin) { + + test('creates a URL path relative to root when called in a Windows environment', () => { + let shortPath = urlFromPath(WIN_ROOT_PATH, WIN_ROOT_PATH + 
'\\shop-app.html'); + assert.equal(shortPath, 'shop-app.html'); + let medPath = urlFromPath(WIN_ROOT_PATH, WIN_ROOT_PATH + '\\src\\shop-app.html'); + assert.equal(medPath, 'src/shop-app.html'); + let longPath = urlFromPath(WIN_ROOT_PATH, WIN_ROOT_PATH + '\\bower_components\\app-layout\\docs.html'); + assert.equal(longPath, 'bower_components/app-layout/docs.html'); + }); + + } else { + + test('creates a URL path relative to root when called in a Posix environment', () => { + let shortPath = urlFromPath(MAC_ROOT_PATH, MAC_ROOT_PATH + '/shop-app.html'); + assert.equal(shortPath, 'shop-app.html'); + let medPath = urlFromPath(MAC_ROOT_PATH, MAC_ROOT_PATH + '/src/shop-app.html'); + assert.equal(medPath, 'src/shop-app.html'); + let longPath = urlFromPath(MAC_ROOT_PATH, MAC_ROOT_PATH + '/bower_components/app-layout/docs.html'); + assert.equal(longPath, 'bower_components/app-layout/docs.html'); + }); + + } + +}); diff --git a/tsconfig.json b/tsconfig.json new file mode 100644 index 00000000..a766d5cf --- /dev/null +++ b/tsconfig.json @@ -0,0 +1,48 @@ +{ + "compilerOptions": { + "target": "es6", + "module": "commonjs", + "moduleResolution": "node", + "isolatedModules": true, + "experimentalDecorators": true, + "emitDecoratorMetadata": true, + "declaration": false, + "noImplicitAny": false, + "removeComments": false, + "noLib": true, + "preserveConstEnums": true, + "suppressImplicitAnyIndexErrors": true, + "outDir": "lib" + }, + "filesGlob": [ + "src/**/*.ts", + "!node_modules/**", + "custom_typings/*.d.ts", + "typings/main/*.d.ts", + "typings/main.d.ts", + "node_modules/typescript/lib/lib.core.es6.d.ts" + ], + "files": [ + "src/analyzer.ts", + "src/build.ts", + "src/bundle.ts", + "src/html-project.ts", + "src/optimize.ts", + "src/polymer-build.ts", + "src/prefetch.ts", + "src/streams.ts", + "src/sw-precache.ts", + "src/uglify-transform.ts", + "src/url-from-path.ts", + "custom_typings/fs-extra.d.ts", + "custom_typings/hydrolysis.d.ts", + "custom_typings/node.d.ts", + 
"custom_typings/plylog.d.ts", + "custom_typings/slash.d.ts", + "custom_typings/vinyl-fs.d.ts", + "node_modules/typescript/lib/lib.core.es6.d.ts" + ], + "atom": { + "rewriteTsconfig": true + } +} diff --git a/tslint.json b/tslint.json new file mode 100644 index 00000000..23598c6b --- /dev/null +++ b/tslint.json @@ -0,0 +1,58 @@ +{ + "rules": { + "class-name": true, + "indent": [ + true, + "spaces" + ], + "no-duplicate-variable": true, + "no-eval": true, + "no-internal-module": true, + "no-trailing-whitespace": true, + "no-var-keyword": true, + "one-line": [ + true, + "check-open-brace", + "check-whitespace" + ], + "quotemark": [ + true, + "single", + "avoid-escape" + ], + "semicolon": [ + true, + "always" + ], + "trailing-comma": [ + true, + "multiline" + ], + "triple-equals": [ + true, + "allow-null-check" + ], + "typedef-whitespace": [ + true, + { + "call-signature": "nospace", + "index-signature": "nospace", + "parameter": "nospace", + "property-declaration": "nospace", + "variable-declaration": "nospace" + } + ], + "variable-name": [ + true, + "ban-keywords" + ], + "whitespace": [ + true, + "check-branch", + "check-decl", + "check-operator", + "check-separator", + "check-type" + ] + } +} diff --git a/typings.json b/typings.json new file mode 100644 index 00000000..476b9a6e --- /dev/null +++ b/typings.json @@ -0,0 +1,22 @@ +{ + "globalDependencies": { + "glob": "registry:dt/glob#5.0.10+20160317120654", + "glob-stream": "registry:dt/glob-stream#3.1.12+20160316155526", + "gulp": "registry:dt/gulp#3.8.0+20160316155526", + "gulp-if": "registry:dt/gulp-if#0.0.0+20160316155526", + "gulp-util": "registry:dt/gulp-util#0.0.0+20160316155526", + "merge-stream": "registry:dt/merge-stream#1.0.0+20160313224417", + "mime": "registry:dt/mime#0.0.0+20160316155526", + "minimatch": "registry:dt/minimatch#2.0.8+20160317120654", + "node": "registry:dt/node#4.0.0+20160412142033", + "source-map": "registry:dt/source-map#0.0.0+20160317120654", + "uglify-js": 
"registry:dt/uglify-js#2.6.1+20160316155526", + "vinyl": "registry:dt/vinyl#1.1.0+20160316155526", + "vinyl-fs": "registry:dt/vinyl-fs#0.0.0+20160317120654" + }, + "dependencies": { + "chalk": "registry:npm/chalk#1.0.0+20160211003958", + "orchestrator": "registry:npm/orchestrator#0.3.7+20160211003958", + "through2": "registry:npm/through2#2.0.0+20160308004148" + } +}