
Merge pull request #152 from Polymer/precacheScriptsStyles
add script precache
justinfagnani committed May 17, 2016
2 parents e525dc6 + f9dbad7 commit 136cfde
Showing 5 changed files with 104 additions and 37 deletions.
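
At a high level, this change makes the analyzer record, for each fragment, not only its HTML imports but also the external scripts and stylesheets those imports reference, and the build step forwards that list to the service worker generator so the assets are precached alongside the imports. A minimal sketch of how the new per-fragment record could be flattened into a single precache list; the helper name and import path are illustrative, not part of the commit:

// Minimal sketch (not part of this commit). `DepsIndex` and `DocumentDeps`
// are the interfaces added in src/build/analyzer.ts below.
import {DepsIndex} from './src/build/analyzer';

function collectPrecacheList(depsIndex: DepsIndex): string[] {
  const precache = new Set<string>();
  depsIndex.fragmentToFullDeps.forEach((deps) => {
    (deps.imports || []).forEach((href) => precache.add(href));  // HTML imports
    (deps.scripts || []).forEach((src) => precache.add(src));    // external scripts
    (deps.styles || []).forEach((href) => precache.add(href));   // external stylesheets
  });
  return Array.from(precache);
}
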
1 change: 1 addition & 0 deletions .gitignore
@@ -1,4 +1,5 @@
.DS_Store
.vscode
/node_modules/
/lib/
/typings/
11 changes: 9 additions & 2 deletions custom_typings/hydrolysis.d.ts
@@ -1,4 +1,5 @@
declare module 'hydrolysis' {
import {Node} from 'dom5';
interface Options {
filter?: (path: string) => boolean;
}
@@ -38,7 +39,11 @@ declare module 'hydrolysis' {

// parsedScript?: estree.Program;

// html?: ParsedImport;
html?: {
script: Node[],
style: Node[],
ast: Node
};
}

/**
@@ -75,10 +80,12 @@ declare module 'hydrolysis' {

constructor(attachAST: boolean, loader: Loader);

metadataTree(path: string): Promise<void>;
metadataTree(path: string): Promise<DocumentDescriptor>;
annotate(): void;
elements: Element[];
behaviors: Behavior[];
html: {[path: string]: AnalyzedDocument};
parsedDocuments: {[path: string]: Node};

load(href: string):Promise<AnalyzedDocument>;

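
The new `html` field on `DocumentDescriptor` exposes the parsed script and style nodes of a document, which is what lets the analyzer below pull script and style URLs out of each import. A rough usage sketch against this typing, mirroring how the analyzer extracts stylesheet URLs; the helper name is illustrative only:

import {getAttribute} from 'dom5';
import {DocumentDescriptor} from 'hydrolysis';

// Illustrative helper: list external stylesheet URLs referenced by a document.
function externalStyles(descriptor: DocumentDescriptor): string[] {
  const hrefs: string[] = [];
  const styles = descriptor.html ? descriptor.html.style : [];
  styles.forEach((node) => {
    const href = getAttribute(node, 'href');
    if (href) {
      hrefs.push(href);
    }
  });
  return hrefs;
}
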
106 changes: 74 additions & 32 deletions src/build/analyzer.ts
@@ -9,16 +9,23 @@
*/

import * as fs from 'fs';
import {Analyzer, Deferred, Loader, Resolver} from 'hydrolysis';
import {Analyzer, Deferred, Loader, Resolver, DocumentDescriptor} from 'hydrolysis';
import * as path from 'path';
import {Transform} from 'stream';
import File = require('vinyl');
import {parse as parseUrl} from 'url';
import * as logging from 'plylog';
import {Node, queryAll, predicates, getAttribute} from 'dom5';

const minimatchAll = require('minimatch-all');
let logger = logging.getLogger('cli.build.analyzer');

export interface DocumentDeps {
imports?: Array<string>,
scripts?: Array<string>,
styles?: Array<string>
}

export class StreamAnalyzer extends Transform {

root: string;
@@ -113,29 +120,32 @@ export class StreamAnalyzer extends Transform {

_getDepsToEntrypointIndex(): Promise<DepsIndex> {
// TODO: tsc is being really weird here...
let depsPromises = <Promise<string[]>[]>this.allFragments.map(
let depsPromises = <Promise<DocumentDeps>[]>this.allFragments.map(
(e) => this._getDependencies(e));

return Promise.all(depsPromises).then((value: any) => {
// tsc was giving a spurious error with `allDeps` as the parameter
let allDeps: string[][] = <string[][]>value;
let allDeps: DocumentDeps[] = <DocumentDeps[]>value;

// An index of dependency -> fragments that depend on it
let depsToFragments = new Map<string, string[]>();

// An index of fragments -> dependencies
let fragmentToDeps = new Map<string, string[]>();

let fragmentToFullDeps = new Map<string, DocumentDeps>();

console.assert(this.allFragments.length === allDeps.length);

for (let i = 0; i < allDeps.length; i++) {
let fragment = this.allFragments[i];
let deps: string[] = allDeps[i];
let deps: DocumentDeps = allDeps[i];
console.assert(deps != null, `deps is null for ${fragment}`);

fragmentToDeps.set(fragment, deps);
fragmentToDeps.set(fragment, deps.imports);
fragmentToFullDeps.set(fragment, deps);

for (let dep of deps) {
for (let dep of deps.imports) {
let entrypointList;
if (!depsToFragments.has(dep)) {
entrypointList = [];
@@ -149,46 +159,78 @@ export class StreamAnalyzer extends Transform {
return {
depsToFragments,
fragmentToDeps,
fragmentToFullDeps,
};
});
}

/**
* Attempts to retrieve document-order transitive dependencies for `url`.
*/
_getDependencies(url: string): Promise<string[]> {
let visited = new Set<string>();
let allDeps = new Set<string>();
// async depth-first traversal: waits for document load, then async
// iterates on dependencies. No return values are used, writes to visited
// and list.
//
// document.depHrefs is _probably_ document order, if all html imports are
// at the same level in the tree.
// See: https://github.com/Polymer/hydrolysis/issues/240
let _getDeps = (url: string) =>
this.analyzer.load(url).then((d) => _iterate(d.depHrefs.values()));

// async iteration: waits for _getDeps on a value to return before
// recursing to call _getDeps on the next value.
let _iterate = (iterator: Iterator<string>) => {
let next = iterator.next();
if (next.done || visited.has(next.value)) {
return Promise.resolve();
} else {
allDeps.add(next.value);
visited.add(url);
return _getDeps(next.value).then(() => _iterate(iterator));
_getDependencies(url: string): Promise<DocumentDeps> {
let documents = this.analyzer.parsedDocuments;
let dir = path.dirname(url);
return this.analyzer.metadataTree(url)
.then((tree) => this._getDependenciesFromDescriptor(tree, dir));
}

_getDependenciesFromDescriptor(descriptor: DocumentDescriptor, dir: string): DocumentDeps {
let allHtmlDeps = [];
let allScriptDeps = new Set<string>();
let allStyleDeps = new Set<string>();

let deps: DocumentDeps = this._collectScriptsAndStyles(descriptor);
deps.scripts.forEach((s) => allScriptDeps.add(path.resolve(dir, s)));
deps.styles.forEach((s) => allStyleDeps.add(path.resolve(dir, s)));
if (descriptor.imports) {
let queue = descriptor.imports.slice();
while (queue.length > 0) {
let next = queue.shift();
if (!next.href) {
continue;
}
allHtmlDeps.push(next.href);
let childDeps = this._getDependenciesFromDescriptor(next, path.dirname(next.href));
allHtmlDeps = allHtmlDeps.concat(childDeps.imports);
childDeps.scripts.forEach((s) => allScriptDeps.add(s));
childDeps.styles.forEach((s) => allStyleDeps.add(s));
}
}
// kick off the traversal from root, then resolve the list of dependencies
return _getDeps(url).then(() => Array.from(allDeps));

return {
scripts: Array.from(allScriptDeps),
styles: Array.from(allStyleDeps),
imports: allHtmlDeps,
};
}

_collectScriptsAndStyles(tree: DocumentDescriptor): DocumentDeps {
let scripts = [];
let styles = [];
tree.html.script.forEach((script) => {
if (script['__hydrolysisInlined']) {
scripts.push(script['__hydrolysisInlined']);
}
});
tree.html.style.forEach((style) => {
let href = getAttribute(style, 'href');
if (href) {
styles.push(href);
}
});
return {
scripts,
styles
};
}
}

export interface DepsIndex {
depsToFragments: Map<string, string[]>;
// TODO(garlicnation): Remove this map.
// A legacy map from fragments to HTML dependencies.
fragmentToDeps: Map<string, string[]>;
// A map from fragment urls to HTML, JS, and CSS dependencies.
fragmentToFullDeps: Map<string, DocumentDeps>;
}

class StreamResolver implements Resolver {
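
For a concrete sense of the index this produces: given a hypothetical fragment shell.html whose one HTML import carries an inlined script and a linked stylesheet, the maps returned by `_getDepsToEntrypointIndex()` would look roughly like this (all paths are illustrative):

import {DepsIndex, DocumentDeps} from './analyzer';

// Illustrative contents of the new index for a single fragment (hypothetical paths).
const shellImports = ['bower_components/app-icons/app-icons.html'];
const shellDeps: DocumentDeps = {
  imports: shellImports,
  scripts: ['bower_components/app-icons/app-icons.js'],
  styles: ['bower_components/app-icons/app-icons.css'],
};

const depsIndex: DepsIndex = {
  depsToFragments: new Map([[shellImports[0], ['shell.html']]]),
  fragmentToDeps: new Map([['shell.html', shellImports]]),
  fragmentToFullDeps: new Map([['shell.html', shellDeps]]),
};
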
13 changes: 11 additions & 2 deletions src/build/build.ts
@@ -155,12 +155,14 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise<a
.pipe(bundler)
.pipe(vfs.dest('build/bundled'));

let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig) => {
let genSW = (buildRoot: string, deps: string[], swConfig: SWConfig, scriptAndStyleDeps?: string[]) => {
logger.debug(`Generating service worker for ${buildRoot}...`);
logger.debug(`Script and style deps: ${scriptAndStyleDeps}`);
return generateServiceWorker({
root,
entrypoint,
deps,
scriptAndStyleDeps,
buildRoot,
swConfig: clone(swConfig),
serviceWorkerPath: path.join(root, buildRoot, serviceWorkerName)
@@ -173,6 +175,13 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise<a
let unbundledDeps = analyzer.allFragments
.concat(Array.from(depsIndex.depsToFragments.keys()));

let fullDeps = Array.from(depsIndex.fragmentToFullDeps.values());
let scriptAndStyleDeps = new Set<string>();
fullDeps.forEach(d => {
d.scripts.forEach((s) => scriptAndStyleDeps.add(s));
d.styles.forEach((s) => scriptAndStyleDeps.add(s));
});

let bundledDeps = analyzer.allFragments
.concat(bundler.sharedBundleUrl);

@@ -185,7 +194,7 @@ export function build(options?: BuildOptions, config?: ProjectConfig): Promise<a

logger.info(`Generating service workers...`);
return Promise.all([
genSW('build/unbundled', unbundledDeps, swConfig),
genSW('build/unbundled', unbundledDeps, swConfig, Array.from(scriptAndStyleDeps)),
genSW('build/bundled', bundledDeps, swConfig)
]);
})
10 changes: 9 additions & 1 deletion src/build/sw-precache.ts
@@ -59,7 +59,11 @@ export function generateServiceWorker(options: GenerateServiceWorkerOptions)
logger.debug(`generateServiceWorker() options:`, options);
let swConfig = options.swConfig || <SWConfig>{};
// strip root prefix, so buildRoot prefix can be added safely
let deps = options.deps.map((p) => {
let scriptsAndImports = options.deps;
if (options.scriptAndStyleDeps) {
scriptsAndImports = scriptsAndImports.concat(options.scriptAndStyleDeps);
}
let deps = scriptsAndImports.map((p) => {
if (p.startsWith(options.root)) {
return p.substring(options.root.length);
}
@@ -121,6 +125,10 @@ export interface GenerateServiceWorkerOptions {
* in addition to files found in `swConfig.staticFileGlobs`
*/
deps: string[];
/**
* List of script and style dependencies.
*/
scriptAndStyleDeps: string[];
/**
* Existing config to use as a base for the service worker generation.
*/
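
Putting it together: the precache list handed to sw-precache is now the union of the HTML import deps and the new script and style deps, each stripped of the project root so the buildRoot prefix can be added safely. A hedged example invocation; paths are illustrative, and swConfig is left out because generateServiceWorker falls back to an empty config:

import {generateServiceWorker} from './sw-precache';

// Illustrative call: scripts and styles are now precached alongside the HTML imports.
generateServiceWorker({
  root: 'app/',
  entrypoint: 'app/index.html',
  deps: ['app/shell.html', 'app/bower_components/app-icons/app-icons.html'],
  scriptAndStyleDeps: ['app/bower_components/app-icons/app-icons.js'],
  buildRoot: 'build/unbundled',
  serviceWorkerPath: 'app/build/unbundled/service-worker.js',
});
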
