diff --git a/lighthouse-core/audits/predictive-perf.js b/lighthouse-core/audits/predictive-perf.js index d281d8b40cbb..f19b50a880b9 100644 --- a/lighthouse-core/audits/predictive-perf.js +++ b/lighthouse-core/audits/predictive-perf.js @@ -8,6 +8,7 @@ const Audit = require('./audit'); const Util = require('../report/v2/renderer/util.js'); const PageDependencyGraph = require('../gather/computed/page-dependency-graph.js'); +const Node = require('../gather/computed/dependency-graph/node.js'); // Parameters (in ms) for log-normal CDF scoring. To see the curve: // https://www.desmos.com/calculator/rjp0lbit8y @@ -28,6 +29,52 @@ class PredictivePerf extends Audit { }; } + /** + * @param {!Node} graph + * @param {!TraceOfTabArtifact} traceOfTab + * @return {!Node} + */ + static getOptimisticFMPGraph(graph, traceOfTab) { + const fmp = traceOfTab.timestamps.firstMeaningfulPaint; + return graph.cloneWithRelationships(node => { + if (node.endTime > fmp) return false; + if (node.type !== Node.TYPES.NETWORK) return true; + return node.record.priority() === 'VeryHigh'; // proxy for render-blocking + }); + } + + /** + * @param {!Node} graph + * @param {!TraceOfTabArtifact} traceOfTab + * @return {!Node} + */ + static getPessimisticFMPGraph(graph, traceOfTab) { + const fmp = traceOfTab.timestamps.firstMeaningfulPaint; + return graph.cloneWithRelationships(node => { + return node.endTime <= fmp; + }); + } + + /** + * @param {!Node} graph + * @return {!Node} + */ + static getOptimisticTTCIGraph(graph) { + return graph.cloneWithRelationships(node => { + return node.record._resourceType && node.record._resourceType._name === 'script' || + node.record.priority() === 'High' || + node.record.priority() === 'VeryHigh'; + }); + } + + /** + * @param {!Node} graph + * @return {!Node} + */ + static getPessimisticTTCIGraph(graph) { + return graph; + } + /** * @param {!Artifacts} artifacts * @return {!AuditResult} @@ -35,8 +82,25 @@ class PredictivePerf extends Audit { static audit(artifacts) { const trace = artifacts.traces[Audit.DEFAULT_PASS]; const devtoolsLogs = artifacts.devtoolsLogs[Audit.DEFAULT_PASS]; - return artifacts.requestPageDependencyGraph(trace, devtoolsLogs).then(graph => { - const rawValue = PageDependencyGraph.computeGraphDuration(graph); + return Promise.all([ + artifacts.requestPageDependencyGraph(trace, devtoolsLogs), + artifacts.requestTraceOfTab(trace), + ]).then(([graph, traceOfTab]) => { + const graphs = { + optimisticFMP: PredictivePerf.getOptimisticFMPGraph(graph, traceOfTab), + pessimisticFMP: PredictivePerf.getPessimisticFMPGraph(graph, traceOfTab), + optimisticTTCI: PredictivePerf.getOptimisticTTCIGraph(graph, traceOfTab), + pessimisticTTCI: PredictivePerf.getPessimisticTTCIGraph(graph, traceOfTab), + }; + + let sum = 0; + const values = {}; + Object.keys(graphs).forEach(key => { + values[key] = PageDependencyGraph.computeGraphDuration(graphs[key]); + sum += values[key]; + }); + + const rawValue = sum / 4; const score = Audit.computeLogNormalScore( rawValue, SCORING_POINT_OF_DIMINISHING_RETURNS, @@ -47,6 +111,7 @@ class PredictivePerf extends Audit { score, rawValue, displayValue: Util.formatMilliseconds(rawValue), + extendedInfo: {value: values}, }; }); } diff --git a/lighthouse-core/gather/computed/dependency-graph/estimator/estimator.js b/lighthouse-core/gather/computed/dependency-graph/estimator/estimator.js new file mode 100644 index 000000000000..6a52b92113a3 --- /dev/null +++ b/lighthouse-core/gather/computed/dependency-graph/estimator/estimator.js @@ -0,0 +1,284 @@ +/** + * @license 
Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + */ +'use strict'; + +const Node = require('../node'); +const TcpConnection = require('./tcp-connection'); + +// see https://cs.chromium.org/search/?q=kDefaultMaxNumDelayableRequestsPerClient&sq=package:chromium&type=cs +const DEFAULT_MAXIMUM_CONCURRENT_REQUESTS = 10; +const DEFAULT_RESPONSE_TIME = 30; +const DEFAULT_RTT = 150; +const DEFAULT_THROUGHPUT = 1600 * 1024; // 1.6 Mbps + +function groupBy(items, keyFunc) { + const grouped = new Map(); + items.forEach(item => { + const key = keyFunc(item); + const group = grouped.get(key) || []; + group.push(item); + grouped.set(key, group); + }); + + return grouped; +} + +class Estimator { + /** + * @param {!Node} graph + * @param {{rtt: number, throughput: number, defaultResponseTime: number, + * maximumConcurrentRequests: number}=} options + */ + constructor(graph, options) { + this._graph = graph; + this._options = Object.assign( + { + rtt: DEFAULT_RTT, + throughput: DEFAULT_THROUGHPUT, + defaultResponseTime: DEFAULT_RESPONSE_TIME, + maximumConcurrentRequests: DEFAULT_MAXIMUM_CONCURRENT_REQUESTS, + }, + options + ); + + this._rtt = this._options.rtt; + this._throughput = this._options.throughput; + this._defaultResponseTime = this._options.defaultResponseTime; + this._maximumConcurrentRequests = Math.min( + TcpConnection.maximumSaturatedConnections(this._rtt, this._throughput), + this._options.maximumConcurrentRequests + ); + } + + /** + * @param {!WebInspector.NetworkRequest} record + * @return {number} + */ + static getResponseTime(record) { + const timing = record._timing; + return (timing && timing.receiveHeadersEnd - timing.sendEnd) || Infinity; + } + + _initializeNetworkRecords() { + const records = []; + + this._graph.getRootNode().traverse(node => { + if (node.type === Node.TYPES.NETWORK) { + records.push(node.record); + } + }); + + this._networkRecords = records; + return records; + } + + _initializeNetworkConnections() { + const connections = new Map(); + const recordsByConnection = groupBy( + this._networkRecords, + record => record.connectionId + ); + + for (const [connectionId, records] of recordsByConnection.entries()) { + const isSsl = records[0].parsedURL.scheme === 'https'; + let responseTime = records.reduce( + (min, record) => Math.min(min, Estimator.getResponseTime(record)), + Infinity + ); + + if (!Number.isFinite(responseTime)) { + responseTime = this._defaultResponseTime; + } + + const connection = new TcpConnection( + this._rtt, + this._throughput, + responseTime, + isSsl + ); + + connections.set(connectionId, connection); + } + + this._connections = connections; + return connections; + } + + _initializeAuxiliaryData() { + this._nodeAuxiliaryData = new Map(); + this._nodesCompleted = new Set(); + this._nodesInProcess = new Set(); + this._nodesInQueue = new Set(); // TODO: replace this with priority queue + this._connectionsInUse = new Set(); + } + + /** + * @param {!Node} node + */ + _enqueueNodeIfPossible(node) { + const dependencies = 
node.getDependencies(); + if ( + !this._nodesCompleted.has(node) && + dependencies.every(dependency => this._nodesCompleted.has(dependency)) + ) { + this._nodesInQueue.add(node); + } + } + + /** + * @param {!Node} node + * @param {number} totalElapsedTime + */ + _startNodeIfPossible(node, totalElapsedTime) { + if (node.type !== Node.TYPES.NETWORK) return; + + const connection = this._connections.get(node.record.connectionId); + + if ( + this._nodesInProcess.size >= this._maximumConcurrentRequests || + this._connectionsInUse.has(connection) + ) { + return; + } + + this._nodesInQueue.delete(node); + this._nodesInProcess.add(node); + this._nodeAuxiliaryData.set(node, { + startTime: totalElapsedTime, + timeElapsed: 0, + timeElapsedOvershoot: 0, + bytesDownloaded: 0, + }); + + this._connectionsInUse.add(connection); + } + + _updateNetworkCapacity() { + for (const connection of this._connectionsInUse) { + connection.setThroughput(this._throughput / this._nodesInProcess.size); + } + } + + /** + * @param {!Node} node + * @return {number} + */ + _estimateTimeRemaining(node) { + if (node.type !== Node.TYPES.NETWORK) throw new Error('Unsupported'); + + const auxiliaryData = this._nodeAuxiliaryData.get(node); + const connection = this._connections.get(node.record.connectionId); + const calculation = connection.calculateTimeToDownload( + node.record.transferSize - auxiliaryData.bytesDownloaded, + auxiliaryData.timeElapsed + ); + + const estimate = calculation.timeElapsed + auxiliaryData.timeElapsedOvershoot; + auxiliaryData.estimatedTimeElapsed = estimate; + return estimate; + } + + /** + * @return {number} + */ + _findNextNodeCompletionTime() { + let minimumTime = Infinity; + for (const node of this._nodesInProcess) { + minimumTime = Math.min(minimumTime, this._estimateTimeRemaining(node)); + } + + return minimumTime; + } + + /** + * @param {!Node} node + * @param {number} timePeriodLength + * @param {number} totalElapsedTime + */ + _updateProgressMadeInTimePeriod(node, timePeriodLength, totalElapsedTime) { + if (node.type !== Node.TYPES.NETWORK) throw new Error('Unsupported'); + + const auxiliaryData = this._nodeAuxiliaryData.get(node); + const connection = this._connections.get(node.record.connectionId); + const calculation = connection.calculateTimeToDownload( + node.record.transferSize - auxiliaryData.bytesDownloaded, + auxiliaryData.timeElapsed, + timePeriodLength - auxiliaryData.timeElapsedOvershoot + ); + + connection.setCongestionWindow(calculation.congestionWindow); + + if (auxiliaryData.estimatedTimeElapsed === timePeriodLength) { + auxiliaryData.endTime = totalElapsedTime; + + connection.setWarmed(true); + this._connectionsInUse.delete(connection); + + this._nodesCompleted.add(node); + this._nodesInProcess.delete(node); + + for (const dependent of node.getDependents()) { + this._enqueueNodeIfPossible(dependent); + } + } else { + auxiliaryData.timeElapsed += calculation.timeElapsed; + auxiliaryData.timeElapsedOvershoot += + calculation.timeElapsed - timePeriodLength; + auxiliaryData.bytesDownloaded += calculation.bytesDownloaded; + } + } + + /** + * @return {number} + */ + estimate() { + // initialize all the necessary data containers + this._initializeNetworkRecords(); + this._initializeNetworkConnections(); + this._initializeAuxiliaryData(); + + const nodesInQueue = this._nodesInQueue; + const nodesInProcess = this._nodesInProcess; + + // add root node to queue + nodesInQueue.add(this._graph.getRootNode()); + + let depth = 0; + let totalElapsedTime = 0; + while (nodesInQueue.size || 
nodesInProcess.size) { + depth++; + + // move all possible queued nodes to in process + for (const node of nodesInQueue) { + this._startNodeIfPossible(node, totalElapsedTime); + } + + // set the available throughput for all connections based on # inflight + this._updateNetworkCapacity(); + + // find the time that the next node will finish + const minimumTime = this._findNextNodeCompletionTime(); + totalElapsedTime += minimumTime; + + // update how far each node will progress until that point + for (const node of nodesInProcess) { + this._updateProgressMadeInTimePeriod( + node, + minimumTime, + totalElapsedTime + ); + } + + if (depth > 10000) { + throw new Error('Maximum depth exceeded: estimate'); + } + } + + return totalElapsedTime; + } +} + +module.exports = Estimator; diff --git a/lighthouse-core/gather/computed/dependency-graph/estimator/tcp-connection.js b/lighthouse-core/gather/computed/dependency-graph/estimator/tcp-connection.js new file mode 100644 index 000000000000..19ca55d80304 --- /dev/null +++ b/lighthouse-core/gather/computed/dependency-graph/estimator/tcp-connection.js @@ -0,0 +1,147 @@ +/** + * @license Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + */ +'use strict'; + +const INITIAL_CONGESTION_WINDOW = 10; +const TCP_SEGMENT_SIZE = 1460; + +class TcpConnection { + /** + * @param {number} rtt + * @param {number} throughput + * @param {number=} responseTime + * @param {boolean=} ssl + */ + constructor(rtt, throughput, responseTime = 0, ssl = true) { + this._warmed = false; + this._ssl = ssl; + this._rtt = rtt; + this._availableThroughput = throughput; + this._responseTime = responseTime; + this._congestionWindow = INITIAL_CONGESTION_WINDOW; + } + + /** + * @param {number} rtt + * @param {number} availableThroughput + * @return {number} + */ + static maximumSaturatedConnections(rtt, availableThroughput) { + const roundTripsPerSecond = 1000 / rtt; + const bytesPerRoundTrip = TCP_SEGMENT_SIZE; + const bytesPerSecond = roundTripsPerSecond * bytesPerRoundTrip; + const minimumThroughputRequiredPerRequest = bytesPerSecond * 8; + return Math.floor(availableThroughput / minimumThroughputRequiredPerRequest); + } + + /** + * @return {number} + */ + _computeMaximumCongestionWindowInSegments() { + const bytesPerSecond = this._availableThroughput / 8; + const secondsPerRoundTrip = this._rtt / 1000; + const bytesPerRoundTrip = bytesPerSecond * secondsPerRoundTrip; + return Math.floor(bytesPerRoundTrip / TCP_SEGMENT_SIZE); + } + + /** + * @param {number} throughput + */ + setThroughput(throughput) { + this._availableThroughput = throughput; + } + + /** + * @param {number} congestion + */ + setCongestionWindow(congestion) { + this._congestionWindow = congestion; + } + + /** + * @param {boolean} warmed + */ + setWarmed(warmed) { + this._warmed = warmed; + } + + /** + * Simulates a network download of a particular number of bytes over an optional maximum amount of time + * and returns information about the ending state. 
+   *
+   * See https://hpbn.co/building-blocks-of-tcp/#three-way-handshake and
+   * https://hpbn.co/transport-layer-security-tls/#tls-handshake for details.
+   *
+   * @param {number} bytesToDownload
+   * @param {number=} timeAlreadyElapsed
+   * @param {number=} maximumTimeToElapse
+   * @return {{timeElapsed: number, roundTrips: number, bytesDownloaded: number, congestionWindow: number}}
+   */
+  calculateTimeToDownload(bytesToDownload, timeAlreadyElapsed = 0, maximumTimeToElapse = Infinity) {
+    const twoWayLatency = this._rtt;
+    const oneWayLatency = twoWayLatency / 2;
+    const maximumCongestionWindow = this._computeMaximumCongestionWindowInSegments();
+
+    let handshakeAndRequest = oneWayLatency;
+    if (!this._warmed) {
+      handshakeAndRequest =
+        // SYN
+        oneWayLatency +
+        // SYN ACK
+        oneWayLatency +
+        // ACK + Application Data
+        oneWayLatency +
+        // ClientHello/ServerHello with TLS False Start
+        (this._ssl ? twoWayLatency : 0);
+    }
+
+    let roundTrips = Math.ceil(handshakeAndRequest / twoWayLatency);
+    const timeToFirstByte = handshakeAndRequest + this._responseTime + oneWayLatency;
+    const timeElapsedForTTFB = Math.max(timeToFirstByte - timeAlreadyElapsed, 0);
+    const maximumDownloadTimeToElapse = maximumTimeToElapse - timeElapsedForTTFB;
+
+    let congestionWindow = Math.min(this._congestionWindow, maximumCongestionWindow);
+    let bytesDownloaded = 0;
+    if (timeElapsedForTTFB > 0) {
+      bytesDownloaded = congestionWindow * TCP_SEGMENT_SIZE;
+    } else {
+      roundTrips = 0;
+    }
+
+    let downloadTimeElapsed = 0;
+    let bytesRemaining = bytesToDownload - bytesDownloaded;
+    while (bytesRemaining > 0 && downloadTimeElapsed <= maximumDownloadTimeToElapse) {
+      roundTrips++;
+      downloadTimeElapsed += twoWayLatency;
+      congestionWindow = Math.max(Math.min(maximumCongestionWindow, congestionWindow * 2), 1);
+
+      const bytesDownloadedInWindow = congestionWindow * TCP_SEGMENT_SIZE;
+      bytesDownloaded += bytesDownloadedInWindow;
+      bytesRemaining -= bytesDownloadedInWindow;
+    }
+
+    const timeElapsed = timeElapsedForTTFB + downloadTimeElapsed;
+    bytesDownloaded = Math.min(bytesDownloaded, bytesToDownload);
+
+    if (Number.isFinite(maximumTimeToElapse)) {
+      return {
+        roundTrips,
+        timeElapsed,
+        bytesDownloaded,
+        congestionWindow,
+      };
+    }
+
+    return {
+      roundTrips,
+      timeElapsed,
+      bytesDownloaded,
+      congestionWindow,
+    };
+  }
+}
+
+module.exports = TcpConnection;
diff --git a/lighthouse-core/gather/computed/dependency-graph/network-node.js b/lighthouse-core/gather/computed/dependency-graph/network-node.js
new file mode 100644
index 000000000000..d5fc2ccf67ae
--- /dev/null
+++ b/lighthouse-core/gather/computed/dependency-graph/network-node.js
@@ -0,0 +1,55 @@
+/**
+ * @license Copyright 2017 Google Inc. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
+ */ +'use strict'; + +const Node = require('./node'); + +class NetworkNode extends Node { + /** + * @param {!WebInspector.NetworkRequest} networkRecord + */ + constructor(networkRecord) { + super(networkRecord.requestId); + this._record = networkRecord; + } + + /** + * @return {string} + */ + get type() { + return Node.TYPES.NETWORK; + } + + /** + * @return {number} + */ + get startTime() { + return this._record.startTime * 1000 * 1000; + } + + /** + * @return {number} + */ + get endTime() { + return this._record.endTime * 1000 * 1000; + } + + /** + * @return {!WebInspector.NetworkRequest} + */ + get record() { + return this._record; + } + + /** + * @return {!NetworkNode} + */ + cloneWithoutRelationships() { + return new NetworkNode(this._record); + } +} + +module.exports = NetworkNode; diff --git a/lighthouse-core/gather/computed/dependency-graph/node.js b/lighthouse-core/gather/computed/dependency-graph/node.js index 8f53fb89975f..9d4497f26f73 100644 --- a/lighthouse-core/gather/computed/dependency-graph/node.js +++ b/lighthouse-core/gather/computed/dependency-graph/node.js @@ -6,7 +6,6 @@ 'use strict'; class Node { - /** * @param {string|number} id */ @@ -23,6 +22,27 @@ class Node { return this._id; } + /** + * @return {string} + */ + get type() { + return Node.TYPES.UNKNOWN; + } + + /** + * @return {number} + */ + get startTime() { + throw new Error('Unimplemented'); + } + + /** + * @return {number} + */ + get endTime() { + throw new Error('Unimplemented'); + } + /** * @return {!Array} */ @@ -44,8 +64,14 @@ class Node { */ getRootNode() { let rootNode = this; - while (rootNode._dependencies.length) { + let maxDepth = 1000; + while (rootNode._dependencies.length && maxDepth) { rootNode = rootNode._dependencies[0]; + maxDepth--; + } + + if (!maxDepth) { + throw new Error('Maximum depth exceeded: getRootNode'); } return rootNode; @@ -109,6 +135,11 @@ class Node { if (!shouldIncludeNode(originalNode)) return; const clonedNode = originalNode.cloneWithoutRelationships(); idToNodeMap.set(clonedNode.id, clonedNode); + }); + + rootNode.traverse(originalNode => { + if (!shouldIncludeNode(originalNode)) return; + const clonedNode = idToNodeMap.get(originalNode.id); for (const dependency of originalNode._dependencies) { const clonedDependency = idToNodeMap.get(dependency.id); @@ -165,4 +196,9 @@ class Node { } } +Node.TYPES = { + UNKNOWN: 'unknown', + NETWORK: 'network', +}; + module.exports = Node; diff --git a/lighthouse-core/gather/computed/page-dependency-graph.js b/lighthouse-core/gather/computed/page-dependency-graph.js index 58bf76377f48..4398614c19d4 100644 --- a/lighthouse-core/gather/computed/page-dependency-graph.js +++ b/lighthouse-core/gather/computed/page-dependency-graph.js @@ -6,8 +6,8 @@ 'use strict'; const ComputedArtifact = require('./computed-artifact'); -const Node = require('./dependency-graph/node'); -const Emulation = require('../../lib/emulation'); +const NetworkNode = require('./dependency-graph/network-node'); +const GraphEstimator = require('./dependency-graph/estimator/estimator'); class PageDependencyGraphArtifact extends ComputedArtifact { get name() { @@ -43,7 +43,7 @@ class PageDependencyGraphArtifact extends ComputedArtifact { const urlToNodeMap = new Map(); networkRecords.forEach(record => { - const node = new Node(record.requestId); + const node = new NetworkNode(record); idToNodeMap.set(record.requestId, node); if (urlToNodeMap.has(record.url)) { @@ -79,26 +79,37 @@ class PageDependencyGraphArtifact extends ComputedArtifact { * @return {number} */ static 
computeGraphDuration(rootNode) { - const depthByNodeId = new Map(); - const getMax = arr => Array.from(arr).reduce((max, next) => Math.max(max, next), 0); - - let startingMax = Infinity; - let endingMax = Infinity; - while (endingMax === Infinity || startingMax > endingMax) { - startingMax = endingMax; - endingMax = 0; - - rootNode.traverse(node => { - const dependencies = node.getDependencies(); - const dependencyDepths = dependencies.map(node => depthByNodeId.get(node.id) || Infinity); - const maxDepth = getMax(dependencyDepths); - endingMax = Math.max(endingMax, maxDepth); - depthByNodeId.set(node.id, maxDepth + 1); - }); + return new GraphEstimator(rootNode).estimate(); + } + + /** + * + * @param {!Node} rootNode + */ + static printGraph(rootNode, widthInCharacters = 100) { + function padRight(str, target, padChar = ' ') { + while (str.length < target) { + str += padChar; + } + return str; } - const maxDepth = getMax(depthByNodeId.values()); - return maxDepth * Emulation.settings.TYPICAL_MOBILE_THROTTLING_METRICS.latency; + const nodes = []; + rootNode.traverse(node => nodes.push(node)); + nodes.sort((a, b) => a.startTime - b.startTime); + + const min = nodes[0].startTime; + const max = nodes.reduce((max, node) => Math.max(max, node.endTime), 0); + + const totalTime = max - min; + const timePerCharacter = totalTime / widthInCharacters; + nodes.forEach(node => { + const offset = Math.round((node.startTime - min) / timePerCharacter); + const length = Math.ceil((node.endTime - node.startTime) / timePerCharacter); + const bar = padRight('', offset) + padRight('', length, '='); + // eslint-disable-next-line + console.log(padRight(bar, widthInCharacters), `| ${node.record._url.slice(0, 30)}`); + }); } /** diff --git a/lighthouse-core/test/audits/predictive-perf-test.js b/lighthouse-core/test/audits/predictive-perf-test.js index 14ba8634ffe3..2a4abac6ca0a 100644 --- a/lighthouse-core/test/audits/predictive-perf-test.js +++ b/lighthouse-core/test/audits/predictive-perf-test.js @@ -26,9 +26,15 @@ describe('Performance: predictive performance audit', () => { }, Runner.instantiateComputedArtifacts()); return PredictivePerf.audit(artifacts).then(output => { - assert.equal(output.score, 97); - assert.equal(Math.round(output.rawValue), 2250); - assert.equal(output.displayValue, '2,250\xa0ms'); + assert.equal(output.score, 66); + assert.equal(Math.round(output.rawValue), 7226); + assert.equal(output.displayValue, '7,230\xa0ms'); + + const valueOf = name => Math.round(output.extendedInfo.value[name]); + assert.equal(valueOf('optimisticFMP'), 1058); + assert.equal(valueOf('pessimisticFMP'), 4704); + assert.equal(valueOf('optimisticTTCI'), 4207); + assert.equal(valueOf('pessimisticTTCI'), 18935); }); }); }); diff --git a/lighthouse-core/test/gather/computed/dependency-graph/estimator/estimator-test.js b/lighthouse-core/test/gather/computed/dependency-graph/estimator/estimator-test.js new file mode 100644 index 000000000000..e68d30d99ffe --- /dev/null +++ b/lighthouse-core/test/gather/computed/dependency-graph/estimator/estimator-test.js @@ -0,0 +1,104 @@ +/** + * @license Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + */ +'use strict'; + +const Node = require('../../../../../gather/computed/dependency-graph/network-node'); +const Estimator = require('../../../../../gather/computed/dependency-graph/estimator/estimator'); + +const assert = require('assert'); +let nextRequestId = 1; + +function request({requestId, connectionId, transferSize, scheme, timing}) { + requestId = requestId || nextRequestId++; + connectionId = connectionId || 1; + transferSize = transferSize || 1000; + scheme = scheme || 'http'; + + return { + requestId, + connectionId, + transferSize, + parsedURL: {scheme}, + _timing: timing, + }; +} + +/* eslint-env mocha */ +describe('DependencyGraph/Estimator', () => { + describe('.estimate', () => { + it('should estimate basic graphs', () => { + const rootNode = new Node(request({})); + const estimator = new Estimator(rootNode, {defaultResponseTime: 500}); + const result = estimator.estimate(); + // should be 2 RTTs and 500ms for the server response time + assert.equal(result, 300 + 500); + }); + + it('should estimate basic waterfall graphs', () => { + const nodeA = new Node(request({connectionId: 1})); + const nodeB = new Node(request({connectionId: 2})); + const nodeC = new Node(request({connectionId: 3})); + const nodeD = new Node(request({connectionId: 4})); + + nodeA.addDependent(nodeB); + nodeB.addDependent(nodeC); + nodeC.addDependent(nodeD); + + const estimator = new Estimator(nodeA, {defaultResponseTime: 500}); + const result = estimator.estimate(); + // should be 800ms each for A, B, C, D + assert.equal(result, 3200); + }); + + it('should estimate basic parallel requests', () => { + const nodeA = new Node(request({connectionId: 1})); + const nodeB = new Node(request({connectionId: 2})); + const nodeC = new Node(request({connectionId: 3, transferSize: 15000})); + const nodeD = new Node(request({connectionId: 4})); + + nodeA.addDependent(nodeB); + nodeA.addDependent(nodeC); + nodeA.addDependent(nodeD); + + const estimator = new Estimator(nodeA, {defaultResponseTime: 500}); + const result = estimator.estimate(); + // should be 800ms for A and 950ms for C (2 round trips of downloading) + assert.equal(result, 800 + 950); + }); + + it('should not reuse connections', () => { + const nodeA = new Node(request({connectionId: 1})); + const nodeB = new Node(request({connectionId: 1})); + const nodeC = new Node(request({connectionId: 1})); + const nodeD = new Node(request({connectionId: 1})); + + nodeA.addDependent(nodeB); + nodeA.addDependent(nodeC); + nodeA.addDependent(nodeD); + + const estimator = new Estimator(nodeA, {defaultResponseTime: 500}); + const result = estimator.estimate(); + // should be 800ms for A and 650ms for the next 3 + assert.equal(result, 800 + 650 * 3); + }); + + it('should adjust throughput based on number of requests', () => { + const nodeA = new Node(request({connectionId: 1})); + const nodeB = new Node(request({connectionId: 2})); + const nodeC = new Node(request({connectionId: 3, transferSize: 15000})); + const nodeD = new Node(request({connectionId: 4})); + + nodeA.addDependent(nodeB); + nodeA.addDependent(nodeC); + nodeA.addDependent(nodeD); + + const estimator = new 
Estimator(nodeA, {defaultResponseTime: 500}); + const result = estimator.estimate(); + // should be 800ms for A and 950ms for C (2 round trips of downloading) + assert.equal(result, 800 + 950); + }); + }); +}); diff --git a/lighthouse-core/test/gather/computed/dependency-graph/estimator/tcp-connection-test.js b/lighthouse-core/test/gather/computed/dependency-graph/estimator/tcp-connection-test.js new file mode 100644 index 000000000000..1fc639ae6dac --- /dev/null +++ b/lighthouse-core/test/gather/computed/dependency-graph/estimator/tcp-connection-test.js @@ -0,0 +1,214 @@ +/** + * @license Copyright 2017 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. + */ +'use strict'; + +// eslint-disable-next-line +const TcpConnection = require('../../../../../gather/computed/dependency-graph/estimator/tcp-connection'); + +const assert = require('assert'); + +/* eslint-env mocha */ +describe('DependencyGraph/Estimator/TcpConnection', () => { + describe('#constructor', () => { + it('should create the connection', () => { + const rtt = 150; + const throughput = 1600 * 1024; + const connection = new TcpConnection(rtt, throughput); + assert.ok(connection); + assert.equal(connection._rtt, rtt); + }); + }); + + describe('#maximumSaturatedConnections', () => { + it('should compute number of supported simulated requests', () => { + const availableThroughput = 1460 * 8 * 10; // 10 TCP segments/second + assert.equal(TcpConnection.maximumSaturatedConnections(100, availableThroughput), 1); + assert.equal(TcpConnection.maximumSaturatedConnections(300, availableThroughput), 3); + assert.equal(TcpConnection.maximumSaturatedConnections(1000, availableThroughput), 10); + }); + }); + + describe('.setWarmed', () => { + it('adjusts the time to download appropriately', () => { + const connection = new TcpConnection(100, Infinity); + assert.equal(connection.calculateTimeToDownload(0).timeElapsed, 300); + connection.setWarmed(true); + assert.equal(connection.calculateTimeToDownload(0).timeElapsed, 100); + }); + }); + + describe('.setCongestionWindow', () => { + it('adjusts the time to download appropriately', () => { + const connection = new TcpConnection(100, Infinity); + assert.deepEqual(connection.calculateTimeToDownload(50000), { + bytesDownloaded: 50000, + congestionWindow: 40, + roundTrips: 5, + timeElapsed: 500, + }); + connection.setCongestionWindow(80); // will download all in one round trip + assert.deepEqual(connection.calculateTimeToDownload(50000), { + bytesDownloaded: 50000, + congestionWindow: 80, + roundTrips: 3, + timeElapsed: 300, + }); + }); + }); + + describe('.calculateTimeToDownload', () => { + context('when maximumTime is not set', () => { + it('should provide the correct values small payload non-SSL', () => { + const connection = new TcpConnection(100, Infinity, 0, false); + assert.deepEqual(connection.calculateTimeToDownload(7300), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 2, + timeElapsed: 200, + }); + }); + + it('should provide the correct values small payload SSL', () => { 
+ const connection = new TcpConnection(100, Infinity, 0, true); + assert.deepEqual(connection.calculateTimeToDownload(7300), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 3, + timeElapsed: 300, + }); + }); + + it('should provide the correct values response time', () => { + const responseTime = 78; + const connection = new TcpConnection(100, Infinity, responseTime, true); + assert.deepEqual(connection.calculateTimeToDownload(7300), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 3, + timeElapsed: 300 + responseTime, + }); + }); + + it('should provide the correct values large payload', () => { + const connection = new TcpConnection(100, 8 * 1000 * 1000); + const bytesToDownload = 10 * 1000 * 1000; // 10 mb + assert.deepEqual(connection.calculateTimeToDownload(bytesToDownload), { + bytesDownloaded: bytesToDownload, + congestionWindow: 68, + roundTrips: 105, + timeElapsed: 10500, + }); + }); + + it('should provide the correct values resumed small payload', () => { + const connection = new TcpConnection(100, Infinity, 0, true); + assert.deepEqual(connection.calculateTimeToDownload(7300, 250), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 3, + timeElapsed: 50, + }); + }); + + it('should provide the correct values resumed large payload', () => { + const connection = new TcpConnection(100, 8 * 1000 * 1000); + const bytesToDownload = 5 * 1000 * 1000; // 5 mb + connection.setCongestionWindow(68); + assert.deepEqual(connection.calculateTimeToDownload(bytesToDownload, 5234), { + bytesDownloaded: bytesToDownload, + congestionWindow: 68, + roundTrips: 51, // 5 mb / (1460 * 68) + timeElapsed: 5100, + }); + }); + }); + + context('when maximumTime is set', () => { + it('should provide the correct values less than TTFB', () => { + const connection = new TcpConnection(100, Infinity, 0, false); + assert.deepEqual(connection.calculateTimeToDownload(7300, 0, 68), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 2, + timeElapsed: 200, + }); + }); + + it('should provide the correct values just over TTFB', () => { + const connection = new TcpConnection(100, Infinity, 0, false); + assert.deepEqual(connection.calculateTimeToDownload(7300, 0, 250), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 2, + timeElapsed: 200, + }); + }); + + it('should provide the correct values with already elapsed', () => { + const connection = new TcpConnection(100, Infinity, 0, false); + assert.deepEqual(connection.calculateTimeToDownload(7300, 75, 250), { + bytesDownloaded: 7300, + congestionWindow: 10, + roundTrips: 2, + timeElapsed: 125, + }); + }); + + it('should provide the correct values large payloads', () => { + const connection = new TcpConnection(100, 8 * 1000 * 1000); + const bytesToDownload = 10 * 1000 * 1000; // 10 mb + assert.deepEqual(connection.calculateTimeToDownload(bytesToDownload, 500, 740), { + bytesDownloaded: 683280, // should be less than 68 * 1460 * 8 + congestionWindow: 68, + roundTrips: 8, + timeElapsed: 800, // skips the handshake because time already elapsed + }); + }); + + it('should all add up', () => { + const connection = new TcpConnection(100, 8 * 1000 * 1000); + const bytesToDownload = 10 * 1000 * 1000; // 10 mb + const firstStoppingPoint = 5234; + const secondStoppingPoint = 315; + const thirdStoppingPoint = 10500 - firstStoppingPoint - secondStoppingPoint; + + const firstSegment = connection.calculateTimeToDownload( + bytesToDownload, + 0, + firstStoppingPoint + ); + const firstOvershoot = firstSegment.timeElapsed 
- firstStoppingPoint; + + connection.setCongestionWindow(firstSegment.congestionWindow); + const secondSegment = connection.calculateTimeToDownload( + bytesToDownload - firstSegment.bytesDownloaded, + firstSegment.timeElapsed, + secondStoppingPoint - firstOvershoot + ); + const secondOvershoot = firstOvershoot + secondSegment.timeElapsed - secondStoppingPoint; + + connection.setCongestionWindow(secondSegment.congestionWindow); + const thirdSegment = connection.calculateTimeToDownload( + bytesToDownload - firstSegment.bytesDownloaded - secondSegment.bytesDownloaded, + firstSegment.timeElapsed + secondSegment.timeElapsed + ); + const thirdOvershoot = secondOvershoot + thirdSegment.timeElapsed - thirdStoppingPoint; + + assert.equal(thirdOvershoot, 0); + assert.equal( + firstSegment.bytesDownloaded + + secondSegment.bytesDownloaded + + thirdSegment.bytesDownloaded, + bytesToDownload + ); + assert.equal( + firstSegment.timeElapsed + secondSegment.timeElapsed + thirdSegment.timeElapsed, + 10500 + ); + }); + }); + }); +}); diff --git a/lighthouse-core/test/gather/computed/dependency-graph/node-test.js b/lighthouse-core/test/gather/computed/dependency-graph/node-test.js index 0b5cb6a21a51..2fc53eafdc3d 100644 --- a/lighthouse-core/test/gather/computed/dependency-graph/node-test.js +++ b/lighthouse-core/test/gather/computed/dependency-graph/node-test.js @@ -147,6 +147,32 @@ describe('DependencyGraph/Node', () => { }); }); + it('should create a copy of a graph with long dependency chains', () => { + // C - D - E - F + // / \ + // A - - - - - - - B + const nodeA = new Node('A'); + const nodeB = new Node('B'); + const nodeC = new Node('C'); + const nodeD = new Node('D'); + const nodeE = new Node('E'); + const nodeF = new Node('F'); + + nodeA.addDependent(nodeB); + nodeF.addDependent(nodeB); + + nodeA.addDependent(nodeC); + nodeC.addDependent(nodeD); + nodeD.addDependent(nodeE); + nodeE.addDependent(nodeF); + + const clone = nodeA.cloneWithRelationships(); + + const clonedIdMap = new Map(); + clone.traverse(node => clonedIdMap.set(node.id, node)); + assert.equal(clonedIdMap.size, 6); + }); + it('should create a copy when not starting at root node', () => { const graph = createComplexGraph(); const cloneD = graph.nodeD.cloneWithRelationships(); diff --git a/lighthouse-core/test/gather/computed/page-dependency-graph-test.js b/lighthouse-core/test/gather/computed/page-dependency-graph-test.js index a909486856fa..eca31b54290b 100644 --- a/lighthouse-core/test/gather/computed/page-dependency-graph-test.js +++ b/lighthouse-core/test/gather/computed/page-dependency-graph-test.js @@ -80,35 +80,4 @@ describe('PageDependencyGraph computed artifact:', () => { assert.deepEqual(nodes[3].getDependencies(), [nodes[0]]); // should depend on rootNode instead }); }); - - describe('#computeGraphDuration', () => { - it('should compute graph duration', () => { - // B - C - D - E - F - // / / \ - // A - * - * - * - * G - H - - const nodeA = new Node('A'); - const nodeB = new Node('B'); - const nodeC = new Node('C'); - const nodeD = new Node('D'); - const nodeE = new Node('E'); - const nodeF = new Node('F'); - const nodeG = new Node('G'); - const nodeH = new Node('H'); - - nodeA.addDependent(nodeB); - nodeA.addDependent(nodeE); - - nodeB.addDependent(nodeC); - nodeC.addDependent(nodeD); - nodeD.addDependent(nodeE); - nodeE.addDependent(nodeF); - nodeF.addDependent(nodeG); - - nodeG.addDependent(nodeH); - - const result = PageDependencyGraph.computeGraphDuration(nodeA); - assert.equal(result, 4500); // 7 hops * ~560ms 
latency/hop - }); - }); });
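
Two short notes for reviewers, appended after the patch rather than inline in any one hunk.

On the audit math in predictive-perf.js: the new rawValue is the plain arithmetic mean of the four graph estimates that now ship in extendedInfo, and the displayed value is that mean formatted to the nearest 10 ms (7226 → '7,230 ms' in the updated test). A minimal sanity check against the numbers predictive-perf-test.js asserts (variable names here are illustrative only):

```js
// Mean of the four per-graph estimates asserted in predictive-perf-test.js.
const values = {
  optimisticFMP: 1058,
  pessimisticFMP: 4704,
  optimisticTTCI: 4207,
  pessimisticTTCI: 18935,
};
const rawValue = Object.keys(values).reduce((sum, key) => sum + values[key], 0) / 4;
// rawValue === 7226, which the report formats as '7,230 ms'.
```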
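On the model in tcp-connection.js: for an effectively unthrottled link, the new tests reduce to simple handshake-plus-slow-start arithmetic. A cold connection pays 1.5 RTTs (SYN, SYN-ACK, then ACK plus the request) before the server's response time and the final half-RTT to the first byte; TLS False Start adds one more RTT; the initial 10-segment congestion window (10 × 1460 bytes) arrives with the response, and each additional round trip doubles the window. The sketch below is not part of the patch — the function name and argument shape are invented, and it ignores the throughput-derived window cap and the resume/maximum-time bookkeeping the real class handles — but it reproduces the timings the new tcp-connection-test.js cases expect:

```js
const TCP_SEGMENT_SIZE = 1460;
const INITIAL_CONGESTION_WINDOW = 10;

// Simplified, unthrottled version of TcpConnection.calculateTimeToDownload (illustrative only).
function estimateDownloadTime({rtt, ssl, responseTime, bytesToDownload}) {
  const oneWay = rtt / 2;
  // Cold connection: SYN, SYN-ACK, then ACK + request; TLS False Start costs one extra RTT.
  const handshakeAndRequest = oneWay * 3 + (ssl ? rtt : 0);
  const timeToFirstByte = handshakeAndRequest + responseTime + oneWay;

  // The first congestion window arrives with the response headers; every further
  // round trip doubles the window (TCP slow start, no window cap in this sketch).
  let congestionWindow = INITIAL_CONGESTION_WINDOW;
  let bytesRemaining = bytesToDownload - congestionWindow * TCP_SEGMENT_SIZE;
  let downloadTime = 0;
  while (bytesRemaining > 0) {
    downloadTime += rtt;
    congestionWindow *= 2;
    bytesRemaining -= congestionWindow * TCP_SEGMENT_SIZE;
  }

  return timeToFirstByte + downloadTime;
}

// Same figures the new tests assert for a 100ms-RTT link with no server response time:
estimateDownloadTime({rtt: 100, ssl: false, responseTime: 0, bytesToDownload: 7300}); // 200
estimateDownloadTime({rtt: 100, ssl: true, responseTime: 0, bytesToDownload: 7300});  // 300
estimateDownloadTime({rtt: 100, ssl: true, responseTime: 0, bytesToDownload: 50000}); // 500
```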