
option #4
patrickhulce committed Apr 20, 2018
1 parent 29b3409 commit 7f449c1
Showing 3 changed files with 102 additions and 69 deletions.
lighthouse-core/audits/byte-efficiency/render-blocking-resources.js (moved from lighthouse-core/audits/dobetterweb/)
@@ -13,13 +13,14 @@
const Audit = require('../audit');
const Node = require('../../lib/dependency-graph/node');
const CPUNode = require('../../lib/dependency-graph/cpu-node');
const ByteEfficiencyAudit = require('../byte-efficiency/byte-efficiency-audit');
const ByteEfficiencyAudit = require('./byte-efficiency-audit');
const UnusedCSS = require('./unused-css-rules');

// Because of the way we detect blocking stylesheets, asynchronously loaded
// CSS with link[rel=preload] and an onload handler (see https://github.com/filamentgroup/loadCSS)
// can be falsely flagged as blocking. Therefore, ignore stylesheets that loaded fast enough
// to possibly be non-blocking (and they have minimal impact anyway).
const MINIMUM_DOWNLOAD_TIME_IN_MS = 50;
const MINIMUM_WASTED_MS = 50;

const keyByUrl = arr => arr.reduce((map, node) => {
map[node.record && node.record.url] = node;
@@ -33,13 +34,13 @@ class RenderBlockingResources extends Audit {
static get meta() {
return {
name: 'render-blocking-resources',
description: 'Reduce render-blocking resources',
description: 'Eliminate render-blocking resources',
informative: true,
scoreDisplayMode: Audit.SCORING_MODES.NUMERIC,
helpText: 'Resources are blocking the first paint of your page. Consider ' +
'delivering critical JS/CSS inline and deferring all non-critical ' +
'JS/styles. [Learn more](https://developers.google.com/web/tools/lighthouse/audits/blocking-resources).',
requiredArtifacts: ['TagsBlockingFirstPaint', 'traces'],
requiredArtifacts: ['CSSUsage', 'URL', 'TagsBlockingFirstPaint', 'traces'],
};
}

@@ -53,29 +54,39 @@ class RenderBlockingResources extends Audit {
const simulatorData = {devtoolsLog, settings: context.settings};
const traceOfTab = await artifacts.requestTraceOfTab(trace);
const simulator = await artifacts.requestLoadSimulator(simulatorData);
const wastedBytesMap = await RenderBlockingResources.computeWastedCSSBytes(artifacts, context);

const metricSettings = {throttlingMethod: 'simulate'};
const metricComputationData = {trace, devtoolsLog, simulator, settings: metricSettings};
const fcpSimulation = await artifacts.requestFirstContentfulPaint(metricComputationData);
const fcpTsInMs = traceOfTab.timestamps.firstContentfulPaint / 1000;

const nodeTimingMap = fcpSimulation.pessimisticEstimate.nodeTiming;
const nodeTimingMap = fcpSimulation.optimisticEstimate.nodeTiming;
const nodesByUrl = keyByUrl(Array.from(nodeTimingMap.keys()));

const results = [];
const deferredNodeIds = new Set();
for (const resource of artifacts.TagsBlockingFirstPaint) {
// Ignore any resources that finished after observed FCP (they're clearly not render-blocking)
if (resource.endTime * 1000 > fcpTsInMs) continue;
if ((resource.endTime - resource.startTime) * 1000 < MINIMUM_DOWNLOAD_TIME_IN_MS) continue;

const node = nodesByUrl[resource.tag.url];
const nodeTiming = nodeTimingMap.get(node);
// TODO(phulce): beacon these occurrences to Sentry to improve FCP graph
if (!node) continue;

const nodeTiming = nodeTimingMap.get(node);
// Mark this node and all its dependents as deferrable
// TODO(phulce): make this slightly more surgical
// i.e. the referenced font asset won't become inlined just because you inline the CSS
node.traverse(node => deferredNodeIds.add(node.id));

const wastedMs = nodeTiming.endTime - nodeTiming.startTime;
if (wastedMs < MINIMUM_WASTED_MS) continue;

results.push({
url: resource.tag.url,
totalBytes: resource.transferSize,
wastedMs: nodeTiming.endTime - nodeTiming.startTime,
wastedMs,
});
}

@@ -85,58 +96,65 @@

const wastedMs = RenderBlockingResources.estimateSavingsFromInlining(
simulator,
fcpSimulation.pessimisticGraph,
fcpSimulation.pessimisticEstimate.timeInMs
fcpSimulation.optimisticGraph,
deferredNodeIds,
wastedBytesMap
);

return {results, wastedMs};
}

/**
* @param {Simulator} simulator
* @param {Node} fcpGraph
* @param {Set<string>} deferredIds
* @param {Map<string, number>} wastedBytesMap
* @return {number}
*/
static estimateSavingsFromInlining(simulator, fcpGraph) {
static estimateSavingsFromInlining(simulator, fcpGraph, deferredIds, wastedBytesMap) {
const originalEstimate = simulator.simulate(fcpGraph).timeInMs;

let earliestCpuTs = Infinity;
let totalChildCpuTime = 0;
let totalChildNetworkBytes = 0;
const graphWithoutChildren = fcpGraph.cloneWithRelationships(node => {
// Node is root node, this is the only one we're keeping
if (node === fcpGraph) return true;

// Node is network node, we're dropping and pretending we're inlining
if (node.type === Node.TYPES.NETWORK) {
totalChildNetworkBytes += node.record.transferSize;
return false;
const willDefer = deferredIds.has(node.id);
if (willDefer && node.type === Node.TYPES.NETWORK &&
node.record._resourceType === WebInspector.resourceTypes.Stylesheet) {
const wastedBytes = wastedBytesMap.get(node.record.url) || 0;
totalChildNetworkBytes += node.record._transferSize - wastedBytes;
}

// Node is CPU node we're dropping and merging into one mega CPU task
if (node.type === Node.TYPES.CPU) {
earliestCpuTs = node.event.ts;
totalChildCpuTime += node.event.dur;
return false;
}
// Include all nodes that couldn't be deferred
return !willDefer;
});

if (totalChildCpuTime) {
const fakeChildEvents = [];
const fakeEvent = {pid: 1, tid: 1, ts: earliestCpuTs, dur: totalChildCpuTime};
const fakeCpuNode = new CPUNode(fakeEvent, fakeChildEvents);
graphWithoutChildren.addDependent(fakeCpuNode);
}

graphWithoutChildren.record._transferSize += totalChildNetworkBytes;
const estimateAfterInline = simulator.simulate(graphWithoutChildren).timeInMs;
const estimateAfterInlineA = simulator.simulate(graphWithoutChildren);
const estimateAfterInline = estimateAfterInlineA.timeInMs;
graphWithoutChildren.record._transferSize -= totalChildNetworkBytes;
return Math.max(originalEstimate - estimateAfterInline, 0);
}

/**
* @param {!Artifacts} artifacts
* @param {LH.Audit.Context} context
* @return {!AuditResult}
* @return {Map<string, number>}
*/
static async computeWastedCSSBytes(artifacts, context) {
const wastedBytesByUrl = new Map();
try {
const results = await UnusedCSS.audit(artifacts, context);
for (const item of results.details.items) {
wastedBytesByUrl.set(item.url, item.wastedBytes);
}
} catch (_) {}

return wastedBytesByUrl;
}

/**
* @param {!Artifacts} artifacts
* @param {LH.Audit.Context} context
* @return {AuditResult}
*/
static async audit(artifacts, context) {
const {results, wastedMs} = await RenderBlockingResources.computeResults(artifacts, context);
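
For reference, a minimal sketch of the deferral bookkeeping that computeResults now performs: each render-blocking tag is matched to its node in the optimistic FCP simulation, that node and all of its dependents are marked deferrable, and the resource is only reported when its simulated cost reaches MINIMUM_WASTED_MS. The node and timing objects below are simplified stand-ins, not Lighthouse's real dependency-graph classes.

```js
// Simplified sketch (assumed shapes, not the real Node/NetworkNode classes):
// nodes carry an `id` and a `dependents` array; timings come from the simulator.
const MINIMUM_WASTED_MS = 50;

function traverse(node, visit) {
  visit(node);
  node.dependents.forEach(dependent => traverse(dependent, visit));
}

const font = {id: 3, dependents: []};
const stylesheet = {id: 2, dependents: [font]}; // render-blocking CSS

// Mark the stylesheet and everything that depends on it as deferrable.
const deferredNodeIds = new Set();
traverse(stylesheet, node => deferredNodeIds.add(node.id));

// Only surface the resource if deferring it would actually save time.
const nodeTiming = {startTime: 100, endTime: 180};
const wastedMs = nodeTiming.endTime - nodeTiming.startTime;
if (wastedMs >= MINIMUM_WASTED_MS) {
  console.log({wastedMs, deferredNodeIds: [...deferredNodeIds]});
  // -> { wastedMs: 80, deferredNodeIds: [ 2, 3 ] }
}
```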
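
Similarly, a hedged sketch of the byte accounting in estimateSavingsFromInlining: a deferred stylesheet only adds its used bytes to the document's transfer size before the graph is re-simulated, because the unused bytes reported by the wasted-CSS map are subtracted first. The URL and sizes here are invented for illustration.

```js
// Sketch only: wastedBytesMap maps a stylesheet URL to its unused bytes, as
// produced by computeWastedCSSBytes(). Only the used portion of the stylesheet
// would need to be inlined into the document.
function inlinedBytesForStylesheet(record, wastedBytesMap) {
  const wastedBytes = wastedBytesMap.get(record.url) || 0;
  return record.transferSize - wastedBytes;
}

const wastedBytesMap = new Map([['https://example.com/style.css', 18 * 1000]]);
const styleRecord = {url: 'https://example.com/style.css', transferSize: 23 * 1000};

// 23 KB transferred, 18 KB unused => only ~5 KB gets added to the document's
// transfer size before re-simulating, mirroring the second test case below.
console.log(inlinedBytesForStylesheet(styleRecord, wastedBytesMap)); // 5000
```

computeWastedCSSBytes builds that map by reusing the existing unused-css-rules audit and falls back to an empty map if that audit throws, so a stylesheet with no entry is simply treated as fully used.
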
lighthouse-core/config/default-config.js (2 changes: 1 addition & 1 deletion)
@@ -152,6 +152,7 @@ module.exports = {
'byte-efficiency/uses-long-cache-ttl',
'byte-efficiency/total-byte-weight',
'byte-efficiency/offscreen-images',
'byte-efficiency/render-blocking-resources',
'byte-efficiency/unminified-css',
'byte-efficiency/unminified-javascript',
'byte-efficiency/unused-css-rules',
@@ -163,7 +164,6 @@ module.exports = {
'dobetterweb/dom-size',
'dobetterweb/external-anchors-use-rel-noopener',
'dobetterweb/geolocation-on-start',
'dobetterweb/render-blocking-resources',
'dobetterweb/no-document-write',
'dobetterweb/no-mutation-events',
'dobetterweb/no-vulnerable-libraries',
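
The config change above just moves the audit's id from dobetterweb/ to byte-efficiency/ (core audit ids resolve against lighthouse-core/audits/). As a hedged illustration of how that id is referenced, a custom config extending the defaults could point at the new path; everything in this snippet other than the audit id is assumed for the example.

```js
// Illustrative custom config (not part of this commit): reference the audit at
// its new byte-efficiency/ location on top of the default config.
module.exports = {
  extends: 'lighthouse:default',
  audits: [
    'byte-efficiency/render-blocking-resources',
  ],
};
```
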
lighthouse-core/test/audits/byte-efficiency/render-blocking-resources-test.js
@@ -5,8 +5,7 @@
*/
'use strict';

const RenderBlockingResourcesAudit =
require('../../../audits/dobetterweb/render-blocking-resources.js');
const RenderBlockingResourcesAudit = require('../../../audits/byte-efficiency/render-blocking-resources.js');

const mobile3G = require('../../../config/constants').throttling.mobile3G;
const Runner = require('../../../runner');
@@ -22,16 +21,19 @@ const devtoolsLog = require('../../fixtures/traces/progressive-app-m60.devtools.

describe('Render blocking resources audit', () => {
it('evaluates http2 input correctly', async () => {
const artifacts = Object.assign({
traces: {defaultPass: trace},
devtoolsLogs: {defaultPass: devtoolsLog},
TagsBlockingFirstPaint: [
{
tag: {url: 'https://pwa.rocks/script.js'},
transferSize: 621,
},
],
}, Runner.instantiateComputedArtifacts());
const artifacts = Object.assign(
{
traces: {defaultPass: trace},
devtoolsLogs: {defaultPass: devtoolsLog},
TagsBlockingFirstPaint: [
{
tag: {url: 'https://pwa.rocks/script.js'},
transferSize: 621,
},
],
},
Runner.instantiateComputedArtifacts()
);

const settings = {throttlingMethod: 'simulate', throttling: mobile3G};
const result = await RenderBlockingResourcesAudit.audit(artifacts, {settings});
@@ -42,42 +44,55 @@ describe('Render blocking resources audit', () => {
describe('#estimateSavingsFromInlining', () => {
const estimate = RenderBlockingResourcesAudit.estimateSavingsFromInlining;

let requestId = 1;
const record = props => {
const ret = Object.assign({parsedURL: {}, requestId: requestId++}, props);
Object.defineProperty(ret, 'transferSize', {
get() {
return ret._transferSize;
},
});
return ret;
};
let requestId;
let record;

it('computes savings from inlining', () => {
beforeEach(() => {
requestId = 1;
record = props => {
const ret = Object.assign({parsedURL: {}, requestId: requestId++}, props);
Object.defineProperty(ret, 'transferSize', {
get() {
return ret._transferSize;
},
});
return ret;
};
});

it('computes savings from deferring', () => {
const serverResponseTimeByOrigin = new Map([['undefined://undefined', 100]]);
const simulator = new Simulator({rtt: 1000, serverResponseTimeByOrigin});
const documentNode = new NetworkNode(record({_transferSize: 4000}));
const styleNode = new NetworkNode(record({_transferSize: 3000}));
const scriptNode = new NetworkNode(record({_transferSize: 1000}));
const scriptExecution = new CPUNode({dur: 50 * 1000}, []);
const scriptExecution = new CPUNode({tid: 1, ts: 1, dur: 50 * 1000}, []);
const deferredIds = new Set([2, 3]);
const wastedBytesMap = new Map();

documentNode.addDependent(scriptNode);
documentNode.addDependent(styleNode);
scriptNode.addDependent(scriptExecution);
const result = estimate(simulator, documentNode);
documentNode.addDependent(scriptExecution);
const result = estimate(simulator, documentNode, deferredIds, wastedBytesMap);
// Saving 1000 + 1000 + 100ms for TCP handshake + request/response + server response time
assert.equal(result, 2100);
// -200 ms for the CPU task that becomes the new bottleneck
assert.equal(result, 1900);
});

it('computes savings from inlining when new RT required', () => {
it('computes savings from inlining', () => {
const serverResponseTimeByOrigin = new Map([['undefined://undefined', 100]]);
const simulator = new Simulator({rtt: 1000, serverResponseTimeByOrigin});
const documentNode = new NetworkNode(record({_transferSize: 10000}));
const styleNode = new NetworkNode(record({_transferSize: 13000})); // pushes document over 14KB
const documentNode = new NetworkNode(record({_transferSize: 10 * 1000}));
const styleNode = new NetworkNode(
record({_transferSize: 23 * 1000, _resourceType: WebInspector.resourceTypes.Stylesheet})
); // pushes document over 14KB
const deferredIds = new Set([2]);
const wastedBytesMap = new Map([[undefined, 18 * 1000]]);
documentNode.addDependent(styleNode);
const result = estimate(simulator, documentNode);
// Saving 1000 + 100ms for TCP handshake + server response time, response RT still required
assert.equal(result, 1100);

const result = estimate(simulator, documentNode, deferredIds, wastedBytesMap);
// Saving 1000 + 1000 + 100ms for TCP handshake + 1 RT savings + server response time
assert.equal(result, 2100);
});
});
});
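
As a rough sanity check of the two expected values above, restating the comments in the tests with the simulator settings they configure (rtt: 1000 ms, server response time: 100 ms):

```js
// Back-of-the-envelope check of the expectations above; the breakdown comes
// straight from the comments in the two tests.
const rtt = 1000;
const serverResponseTime = 100;

// 'computes savings from deferring': dropping the deferred requests saves a TCP
// handshake, a request/response round trip, and the server response time, while
// the remaining CPU task becomes the new bottleneck and costs ~200 ms back.
const deferringSavings = rtt + rtt + serverResponseTime - 200; // 1900

// 'computes savings from inlining': inlining only the used CSS saves a TCP
// handshake, one round trip, and the server response time.
const inliningSavings = rtt + rtt + serverResponseTime; // 2100

console.log(deferringSavings, inliningSavings); // 1900 2100
```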
