From 8a62241a5439413de8006463bc1b56fcd1c5ad4a Mon Sep 17 00:00:00 2001
From: AJ Ortega
Date: Mon, 16 May 2016 23:37:46 -0700
Subject: [PATCH 1/2] add script precache

---
 .gitignore                     |  1 +
 custom_typings/hydrolysis.d.ts |  3 ++
 src/build/analyzer.ts          | 66 ++++++++++++++++++++++++++++------
 src/build/build.ts             | 10 ++++--
 src/build/sw-precache.ts       | 10 +++++-
 5 files changed, 77 insertions(+), 13 deletions(-)

diff --git a/.gitignore b/.gitignore
index 90123b61..1be24338 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 .DS_Store
+.vscode
 /node_modules/
 /lib/
 /typings/
diff --git a/custom_typings/hydrolysis.d.ts b/custom_typings/hydrolysis.d.ts
index d5b6259f..d7277ad8 100644
--- a/custom_typings/hydrolysis.d.ts
+++ b/custom_typings/hydrolysis.d.ts
@@ -1,4 +1,5 @@
 declare module 'hydrolysis' {
+  import {Node} from 'dom5';
   interface Options {
     filter?: (path: string) => boolean;
   }
@@ -79,6 +80,8 @@ declare module 'hydrolysis' {
     annotate(): void;
     elements: Element[];
     behaviors: Behavior[];
+    html: {[path: string]: AnalyzedDocument};
+    parsedDocuments: {[path: string]: Node};

     load(href: string):Promise;

diff --git a/src/build/analyzer.ts b/src/build/analyzer.ts
index e92bff07..ae99ef18 100644
--- a/src/build/analyzer.ts
+++ b/src/build/analyzer.ts
@@ -15,10 +15,17 @@
 import {Transform} from 'stream';
 import File = require('vinyl');
 import {parse as parseUrl} from 'url';
 import * as logging from 'plylog';
+import {Node, queryAll, predicates, getAttribute} from 'dom5';
 const minimatchAll = require('minimatch-all');

 let logger = logging.getLogger('cli.build.analyzer');

+export interface DocumentDeps{
+  imports?: Array<string>,
+  scripts: Array<string>,
+  styles: Array<string>
+}
+
 export class StreamAnalyzer extends Transform {

   root: string;
@@ -113,29 +120,32 @@ export class StreamAnalyzer extends Transform {
   _getDepsToEntrypointIndex(): Promise<DepsIndex> {
     // TODO: tsc is being really weird here...
-    let depsPromises = <Promise<string[]>[]>this.allFragments.map(
+    let depsPromises = <Promise<DocumentDeps>[]>this.allFragments.map(
         (e) => this._getDependencies(e));

     return Promise.all(depsPromises).then((value: any) => {
       // tsc was giving a spurious error with `allDeps` as the parameter
-      let allDeps: string[][] = value;
+      let allDeps: DocumentDeps[] = value;

       // An index of dependency -> fragments that depend on it
       let depsToFragments = new Map();

       // An index of fragments -> dependencies
       let fragmentToDeps = new Map();
+
+      let fragmentToFullDeps = new Map();

       console.assert(this.allFragments.length === allDeps.length);

       for (let i = 0; i < allDeps.length; i++) {
         let fragment = this.allFragments[i];
-        let deps: string[] = allDeps[i];
+        let deps: DocumentDeps = allDeps[i];
         console.assert(deps != null, `deps is null for ${fragment}`);

-        fragmentToDeps.set(fragment, deps);
+        fragmentToDeps.set(fragment, deps.imports);
+        fragmentToFullDeps.set(fragment, deps);

-        for (let dep of deps) {
+        for (let dep of deps.imports) {
           let entrypointList;
           if (!depsToFragments.has(dep)) {
             entrypointList = [];
@@ -149,16 +159,37 @@ export class StreamAnalyzer extends Transform {
       return {
         depsToFragments,
         fragmentToDeps,
+        fragmentToFullDeps,
       };
     });
   }
+  _collectScriptsAndStyles(tree: Node): DocumentDeps {
+    const externalScriptPredicate = predicates.AND(
+      predicates.hasTagName('script')
+    );
+    const externalStylePredicate = predicates.AND(
+      predicates.hasTagName('style'),
+      predicates.hasAttrValue('rel', 'stylesheet'),
+      predicates.hasAttr('href')
+    )
+    let scriptNodes = queryAll(tree, externalScriptPredicate);
+    let styleNodes = queryAll(tree, externalStylePredicate);
+    let scripts: string[] = scriptNodes.map((s) => s.__hydrolysisInlined).filter((s) => !!s);
+    let styles = styleNodes.map((s) => getAttribute(s, 'href'));
+    return {
+      scripts,
+      styles
+    }
+  }
   /**
    * Attempts to retrieve document-order transitive dependencies for `url`.
    */
-  _getDependencies(url: string): Promise<string[]> {
+  _getDependencies(url: string): Promise<DocumentDeps> {
     let visited = new Set();
-    let allDeps = new Set();
+    let allHtmlDeps = new Set();
+    let allScriptDeps = new Set();
+    let allStyleDeps = new Set();
     // async depth-first traversal: waits for document load, then async
     // iterates on dependencies. No return values are used, writes to visited
     // and list.
     //
     // document.depHrefs is _probably_ document order, if all html imports are
     // at the same level in the tree.
     // See: https://github.com/Polymer/hydrolysis/issues/240
+    let documents = this.analyzer.parsedDocuments;
     let _getDeps = (url: string) =>
-        this.analyzer.load(url).then((d) => _iterate(d.depHrefs.values()));
+        this.analyzer.load(url).then((d) => {
+          let document = documents[d.href];
+          let dir = path.dirname(url);
+          let deps: DocumentDeps = this._collectScriptsAndStyles(document);
+          deps.scripts.forEach(s => allScriptDeps.add(path.resolve(dir, s)));
+          deps.styles.forEach(s => allStyleDeps.add(path.resolve(dir, s)));
+          return _iterate(d.depHrefs.values());
+        });

     // async iteration: waits for _getDeps on a value to return before
     // recursing to call _getDeps on the next value.
@@ -176,19 +215,26 @@ export class StreamAnalyzer extends Transform {
       if (next.done || visited.has(next.value)) {
         return Promise.resolve();
       } else {
-        allDeps.add(next.value);
+        allHtmlDeps.add(next.value);
         visited.add(url);
         return _getDeps(next.value).then(() => _iterate(iterator));
       }
     }

     // kick off the traversal from root, then resolve the list of dependencies
-    return _getDeps(url).then(() => Array.from(allDeps));
+    return _getDeps(url).then(() => {
+      return {
+        imports: Array.from(allHtmlDeps),
+        scripts: Array.from(allScriptDeps),
+        styles: Array.from(allStyleDeps),
+      }
+    });
   }
 }

 export interface DepsIndex {
   depsToFragments: Map<string, string[]>;
   fragmentToDeps: Map<string, string[]>;
+  fragmentToFullDeps: Map<string, DocumentDeps>;
 }

 class StreamResolver implements Resolver {
diff --git a/src/build/build.ts b/src/build/build.ts
index 758891ac..38b84182 100644
--- a/src/build/build.ts
+++ b/src/build/build.ts
@@ -155,12 +155,14 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise {
-  let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig) => {
+  let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig, scriptDeps?: string[]) => {
     logger.debug(`Generating service worker for ${buildRoot}...`);
+    logger.info(`Script deps: ${scriptDeps}`);
     return generateServiceWorker({
       root,
       entrypoint,
       deps,
+      scriptDeps,
       buildRoot,
       swConfig: clone(swConfig),
       serviceWorkerPath: path.join(root, buildRoot, serviceWorkerName)
@@ -172,6 +174,10 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise {
       let unbundledDeps = analyzer.allFragments
           .concat(Array.from(depsIndex.depsToFragments.keys()));
+
+      let fullDeps = Array.from(depsIndex.fragmentToFullDeps.values());
+      let scriptDeps = new Set();
+      fullDeps.forEach(d => d.scripts.forEach(s => scriptDeps.add(s)));

       let bundledDeps = analyzer.allFragments
           .concat(bundler.sharedBundleUrl);
@@ -185,7 +191,7 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise
diff --git a/src/build/sw-precache.ts b/src/build/sw-precache.ts
--- a/src/build/sw-precache.ts
+++ b/src/build/sw-precache.ts
   // strip root prefix, so buildRoot prefix can be added safely
-  let deps = options.deps.map((p) => {
+  let scriptsAndImports = options.deps;
+  if (options.scriptDeps) {
+    scriptsAndImports = scriptsAndImports.concat(options.scriptDeps);
+  }
+  let deps = scriptsAndImports.map((p) => {
     if (p.startsWith(options.root)) {
       return p.substring(options.root.length);
     }
@@ -121,6 +125,10 @@ export interface GenerateServiceWorkerOptions {
    * in addition to files found in `swConfig.staticFileGlobs`
    */
   deps: string[];
+  /**
+   * List of script dependencies.
+   */
+  scriptDeps: string[];
   /**
    * Existing config to use as a base for the service worker generation.
   */

From f9dbad74b5fae32605a6d9373681e1cd6d1bbf28 Mon Sep 17 00:00:00 2001
From: AJ Ortega
Date: Tue, 17 May 2016 10:39:11 -0700
Subject: [PATCH 2/2] remove async loop

---
 custom_typings/hydrolysis.d.ts |   8 ++-
 src/build/analyzer.ts          | 116 ++++++++++++++++-----------------
 src/build/build.ts             |  17 +++--
 src/build/sw-precache.ts       |   8 +--
 4 files changed, 76 insertions(+), 73 deletions(-)

diff --git a/custom_typings/hydrolysis.d.ts b/custom_typings/hydrolysis.d.ts
index d7277ad8..c0f3479b 100644
--- a/custom_typings/hydrolysis.d.ts
+++ b/custom_typings/hydrolysis.d.ts
@@ -39,7 +39,11 @@ declare module 'hydrolysis' {
     // parsedScript?: estree.Program;

-    // html?: ParsedImport;
+    html?: {
+      script: Node[],
+      style: Node[],
+      ast: Node
+    };
   }

   /**
@@ -76,7 +80,7 @@ declare module 'hydrolysis' {
     constructor(attachAST: boolean, loader: Loader);

-    metadataTree(path: string): Promise;
+    metadataTree(path: string): Promise<DocumentDescriptor>;
     annotate(): void;
     elements: Element[];
     behaviors: Behavior[];
diff --git a/src/build/analyzer.ts b/src/build/analyzer.ts
index ae99ef18..78a04ba8 100644
--- a/src/build/analyzer.ts
+++ b/src/build/analyzer.ts
@@ -9,7 +9,7 @@
  */

 import * as fs from 'fs';
-import {Analyzer, Deferred, Loader, Resolver} from 'hydrolysis';
+import {Analyzer, Deferred, Loader, Resolver, DocumentDescriptor} from 'hydrolysis';
 import * as path from 'path';
 import {Transform} from 'stream';
 import File = require('vinyl');
@@ -22,8 +22,8 @@ let logger = logging.getLogger('cli.build.analyzer');

 export interface DocumentDeps{
   imports?: Array<string>,
-  scripts: Array<string>,
-  styles: Array<string>
+  scripts?: Array<string>,
+  styles?: Array<string>
 }

 export class StreamAnalyzer extends Transform {
@@ -125,14 +125,14 @@

     return Promise.all(depsPromises).then((value: any) => {
       // tsc was giving a spurious error with `allDeps` as the parameter
-      let allDeps: DocumentDeps[] = value;
+      let allDeps: DocumentDeps[] = value;

       // An index of dependency -> fragments that depend on it
       let depsToFragments = new Map();

       // An index of fragments -> dependencies
       let fragmentToDeps = new Map();
-
+
       let fragmentToFullDeps = new Map();

       console.assert(this.allFragments.length === allDeps.length);
@@ -163,77 +163,73 @@ export class StreamAnalyzer extends Transform {
       };
     });
   }
-  _collectScriptsAndStyles(tree: Node): DocumentDeps {
-    const externalScriptPredicate = predicates.AND(
-      predicates.hasTagName('script')
-    );
-    const externalStylePredicate = predicates.AND(
-      predicates.hasTagName('style'),
-      predicates.hasAttrValue('rel', 'stylesheet'),
-      predicates.hasAttr('href')
-    )
-    let scriptNodes = queryAll(tree, externalScriptPredicate);
-    let styleNodes = queryAll(tree, externalStylePredicate);
-    let scripts: string[] = scriptNodes.map((s) => s.__hydrolysisInlined).filter((s) => !!s);
-    let styles = styleNodes.map((s) => getAttribute(s, 'href'));
-    return {
-      scripts,
-      styles
-    }
-  }
-
   /**
    * Attempts to retrieve document-order transitive dependencies for `url`.
    */
   _getDependencies(url: string): Promise<DocumentDeps> {
-    let visited = new Set();
-    let allHtmlDeps = new Set();
+    let documents = this.analyzer.parsedDocuments;
+    let dir = path.dirname(url);
+    return this.analyzer.metadataTree(url)
+        .then((tree) => this._getDependenciesFromDescriptor(tree, dir));
+  }
+
+  _getDependenciesFromDescriptor(descriptor: DocumentDescriptor, dir: string): DocumentDeps {
+    let allHtmlDeps = [];
     let allScriptDeps = new Set();
     let allStyleDeps = new Set();
-    // async depth-first traversal: waits for document load, then async
-    // iterates on dependencies. No return values are used, writes to visited
-    // and list.
-    //
-    // document.depHrefs is _probably_ document order, if all html imports are
-    // at the same level in the tree.
-    // See: https://github.com/Polymer/hydrolysis/issues/240
-    let documents = this.analyzer.parsedDocuments;
-    let _getDeps = (url: string) =>
-        this.analyzer.load(url).then((d) => {
-          let document = documents[d.href];
-          let dir = path.dirname(url);
-          let deps: DocumentDeps = this._collectScriptsAndStyles(document);
-          deps.scripts.forEach(s => allScriptDeps.add(path.resolve(dir, s)));
-          deps.styles.forEach(s => allStyleDeps.add(path.resolve(dir, s)));
-          return _iterate(d.depHrefs.values());
-        });
-
-    // async iteration: waits for _getDeps on a value to return before
-    // recursing to call _getDeps on the next value.
-    let _iterate = (iterator: Iterator<string>) => {
-      let next = iterator.next();
-      if (next.done || visited.has(next.value)) {
-        return Promise.resolve();
-      } else {
-        allHtmlDeps.add(next.value);
-        visited.add(url);
-        return _getDeps(next.value).then(() => _iterate(iterator));
+
+    let deps: DocumentDeps = this._collectScriptsAndStyles(descriptor);
+    deps.scripts.forEach((s) => allScriptDeps.add(path.resolve(dir, s)));
+    deps.styles.forEach((s) => allStyleDeps.add(path.resolve(dir, s)));
+    if (descriptor.imports) {
+      let queue = descriptor.imports.slice();
+      while (queue.length > 0) {
+        let next = queue.shift();
+        if (!next.href) {
+          continue;
+        }
+        allHtmlDeps.push(next.href);
+        let childDeps = this._getDependenciesFromDescriptor(next, path.dirname(next.href));
+        allHtmlDeps = allHtmlDeps.concat(childDeps.imports);
+        childDeps.scripts.forEach((s) => allScriptDeps.add(s));
+        childDeps.styles.forEach((s) => allStyleDeps.add(s));
       }
     }
-    // kick off the traversal from root, then resolve the list of dependencies
-    return _getDeps(url).then(() => {
-      return {
-        imports: Array.from(allHtmlDeps),
-        scripts: Array.from(allScriptDeps),
-        styles: Array.from(allStyleDeps),
+
+    return {
+      scripts: Array.from(allScriptDeps),
+      styles: Array.from(allStyleDeps),
+      imports: allHtmlDeps,
+    };
+  }
+
+  _collectScriptsAndStyles(tree: DocumentDescriptor): DocumentDeps {
+    let scripts = [];
+    let styles = [];
+    tree.html.script.forEach((script) => {
+      if (script['__hydrolysisInlined']) {
+        scripts.push(script['__hydrolysisInlined']);
      }
    });
+    tree.html.style.forEach((style) => {
+      let href = getAttribute(style, 'href');
+      if (href) {
+        styles.push(href);
+      }
+    });
+    return {
+      scripts,
+      styles
+    }
   }
 }

 export interface DepsIndex {
   depsToFragments: Map<string, string[]>;
+  // TODO(garlicnation): Remove this map.
+  // A legacy map from fragments to html dependencies.
   fragmentToDeps: Map<string, string[]>;
+  // A map from fragment urls to html, js, and css dependencies.
   fragmentToFullDeps: Map<string, DocumentDeps>;
 }
diff --git a/src/build/build.ts b/src/build/build.ts
index 38b84182..507507eb 100644
--- a/src/build/build.ts
+++ b/src/build/build.ts
@@ -155,14 +155,14 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise {
-  let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig, scriptDeps?: string[]) => {
+  let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig, scriptAndStyleDeps?: string[]) => {
     logger.debug(`Generating service worker for ${buildRoot}...`);
-    logger.info(`Script deps: ${scriptDeps}`);
+    logger.debug(`Script and style deps: ${scriptAndStyleDeps}`);
     return generateServiceWorker({
       root,
       entrypoint,
       deps,
-      scriptDeps,
+      scriptAndStyleDeps,
       buildRoot,
       swConfig: clone(swConfig),
       serviceWorkerPath: path.join(root, buildRoot, serviceWorkerName)
@@ -174,10 +174,13 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise {
       let unbundledDeps = analyzer.allFragments
           .concat(Array.from(depsIndex.depsToFragments.keys()));
-
+
       let fullDeps = Array.from(depsIndex.fragmentToFullDeps.values());
-      let scriptDeps = new Set();
-      fullDeps.forEach(d => d.scripts.forEach(s => scriptDeps.add(s)));
+      let scriptAndStyleDeps = new Set();
+      fullDeps.forEach(d => {
+        d.scripts.forEach((s) => scriptAndStyleDeps.add(s));
+        d.styles.forEach((s) => scriptAndStyleDeps.add(s));
+      });

       let bundledDeps = analyzer.allFragments
           .concat(bundler.sharedBundleUrl);
@@ -191,7 +194,7 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise
diff --git a/src/build/sw-precache.ts b/src/build/sw-precache.ts
--- a/src/build/sw-precache.ts
+++ b/src/build/sw-precache.ts
   // strip root prefix, so buildRoot prefix can be added safely
   let scriptsAndImports = options.deps;
-  if (options.scriptDeps) {
-    scriptsAndImports = scriptsAndImports.concat(options.scriptDeps);
+  if (options.scriptAndStyleDeps) {
+    scriptsAndImports = scriptsAndImports.concat(options.scriptAndStyleDeps);
   }
   let deps = scriptsAndImports.map((p) => {
     if (p.startsWith(options.root)) {
@@ -126,9 +126,9 @@ export interface GenerateServiceWorkerOptions {
    */
   deps: string[];
   /**
-   * List of script dependencies.
+   * List of script and style dependencies.
    */
-  scriptDeps: string[];
+  scriptAndStyleDeps: string[];
   /**
    * Existing config to use as a base for the service worker generation.
    */