From badbe115efcd7676e751500530f473e65d8cc26a Mon Sep 17 00:00:00 2001 From: Patrick Hulce Date: Mon, 23 Apr 2018 18:10:18 -0700 Subject: [PATCH 1/6] core(preload): use lantern to compute savings --- lighthouse-core/audits/uses-rel-preload.js | 115 ++++++++--- lighthouse-core/lib/dependency-graph/node.js | 25 +++ .../simulator/connection-pool.js | 37 +++- .../dependency-graph/simulator/simulator.js | 22 ++- .../simulator/tcp-connection.js | 22 +++ .../test/audits/uses-rel-preload-test.js | 102 +++++++--- .../simulator/connection-pool-test.js | 187 ++++++++++++++++++ 7 files changed, 445 insertions(+), 65 deletions(-) create mode 100644 lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js diff --git a/lighthouse-core/audits/uses-rel-preload.js b/lighthouse-core/audits/uses-rel-preload.js index 963c9a8248d4..6e96f8da18b8 100644 --- a/lighthouse-core/audits/uses-rel-preload.js +++ b/lighthouse-core/audits/uses-rel-preload.js @@ -50,55 +50,118 @@ class UsesRelPreloadAudit extends Audit { return requests; } + /** + * Computes the estimated effect of preloading all the resources. + * @param {Set} urls The array of byte savings results per resource + * @param {Node} graph + * @param {Simulator} simulator + * @param {LH.WebInspector.NetworkRequest} mainResource + * @return {{wastedMs: number, results: Array<{url: string, wastedMs: number}>}} + */ + static computeWasteWithGraph(urls, graph, simulator, mainResource) { + if (!urls.size) { + return {wastedMs: 0, results: []}; + } + + const simulationBeforeChanges = simulator.simulate(graph, {ignoreObserved: true}); + + const modifiedGraph = graph.cloneWithRelationships(); + + const nodesToPreload = []; + /** @type {Node|null} */ + let mainDocumentNode = null; + modifiedGraph.traverse(node => { + if (node.record && urls.has(node.record.url)) { + nodesToPreload.push(node); + } + + if (node.record && node.record.url === mainResource.url) { + mainDocumentNode = node; + } + }); + + if (!mainDocumentNode) { + // Should always find the main document node + throw new Error('Could not find main document node'); + } + + // Preload has the effect of moving the resource's only dependency to the main HTML document + // Remove all dependencies of the nodes + for (const node of nodesToPreload) { + node.removeAllDependencies(); + node.addDependency(mainDocumentNode); + } + + const simulationAfterChanges = simulator.simulate(modifiedGraph, {ignoreObserved: true}); + const originalNodesByRecord = Array.from(simulationBeforeChanges.nodeTimings.keys()) + .reduce((map, node) => map.set(node.record, node), new Map()); + + const results = []; + for (const node of nodesToPreload) { + const originalNode = originalNodesByRecord.get(node.record); + const timingAfter = simulationAfterChanges.nodeTimings.get(node); + const timingBefore = simulationBeforeChanges.nodeTimings.get(originalNode); + const wastedMs = Math.round(timingBefore.endTime - timingAfter.endTime); + if (wastedMs < THRESHOLD_IN_MS) continue; + results.push({url: node.record.url, wastedMs}); + } + + if (!results.length) { + return {wastedMs: 0, results}; + } + + return { + // Preload won't necessarily impact the deepest chain/overall time + // We'll use the maximum endTime improvement for now + wastedMs: Math.max(...results.map(item => item.wastedMs)), + results, + }; + } + /** * @param {!Artifacts} artifacts * @return {!AuditResult} */ - static audit(artifacts) { - const devtoolsLogs = artifacts.devtoolsLogs[UsesRelPreloadAudit.DEFAULT_PASS]; + static audit(artifacts, context) { + const 
trace = artifacts.traces[UsesRelPreloadAudit.DEFAULT_PASS]; + const devtoolsLog = artifacts.devtoolsLogs[UsesRelPreloadAudit.DEFAULT_PASS]; + const simulatorOptions = {trace, devtoolsLog, settings: context.settings}; return Promise.all([ - artifacts.requestCriticalRequestChains(devtoolsLogs), - artifacts.requestMainResource(devtoolsLogs), - ]).then(([critChains, mainResource]) => { - const results = []; - let maxWasted = 0; + // TODO(phulce): eliminate dependency on CRC + artifacts.requestCriticalRequestChains(devtoolsLog), + artifacts.requestMainResource(devtoolsLog), + artifacts.requestPageDependencyGraph({trace, devtoolsLog}), + artifacts.requestLoadSimulator(simulatorOptions), + ]).then(([critChains, mainResource, graph, simulator]) => { // get all critical requests 2 + mainResourceIndex levels deep const mainResourceIndex = mainResource.redirects ? mainResource.redirects.length : 0; const criticalRequests = UsesRelPreloadAudit._flattenRequests(critChains, 3 + mainResourceIndex, 2 + mainResourceIndex); - criticalRequests.forEach(request => { - const networkRecord = request; + const urls = new Set(); + for (const networkRecord of criticalRequests) { if (!networkRecord._isLinkPreload && networkRecord.protocol !== 'data') { - // calculate time between mainresource.endTime and resource start time - const wastedMs = Math.min(request._startTime - mainResource._endTime, - request._endTime - request._startTime) * 1000; - - if (wastedMs >= THRESHOLD_IN_MS) { - maxWasted = Math.max(wastedMs, maxWasted); - results.push({ - url: request.url, - wastedMs: Util.formatMilliseconds(wastedMs), - }); - } + urls.add(networkRecord._url); } - }); + } + const {results, wastedMs} = UsesRelPreloadAudit.computeWasteWithGraph(urls, graph, simulator, + mainResource); // sort results by wastedTime DESC results.sort((a, b) => b.wastedMs - a.wastedMs); const headings = [ {key: 'url', itemType: 'url', text: 'URL'}, - {key: 'wastedMs', itemType: 'text', text: 'Potential Savings'}, + {key: 'wastedMs', itemType: 'ms', text: 'Potential Savings', granularity: 10}, ]; - const summary = {wastedMs: maxWasted}; + const summary = {wastedMs}; const details = Audit.makeTableDetails(headings, results, summary); return { - score: UnusedBytes.scoreForWastedMs(maxWasted), - rawValue: maxWasted, - displayValue: Util.formatMilliseconds(maxWasted), + score: UnusedBytes.scoreForWastedMs(wastedMs), + rawValue: wastedMs, + displayValue: Util.formatMilliseconds(wastedMs), extendedInfo: { value: results, }, diff --git a/lighthouse-core/lib/dependency-graph/node.js b/lighthouse-core/lib/dependency-graph/node.js index 877882e77cc4..0b6724552b1f 100644 --- a/lighthouse-core/lib/dependency-graph/node.js +++ b/lighthouse-core/lib/dependency-graph/node.js @@ -110,6 +110,31 @@ class Node { this._dependencies.push(node); } + /** + * @param {Node} node + */ + removeDependent(node) { + node.removeDependency(this); + } + + /** + * @param {Node} node + */ + removeDependency(node) { + if (!this._dependencies.includes(node)) { + return; + } + + node._dependents.splice(node._dependents.indexOf(this), 1); + this._dependencies.splice(this._dependencies.indexOf(node), 1); + } + + removeAllDependencies() { + for (const node of this._dependencies.slice()) { + this.removeDependency(node); + } + } + /** * Clones the node's information without adding any dependencies/dependents. 
* @return {Node} diff --git a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js index 8295863eccf9..aaf026d4e325 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js +++ b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js @@ -82,15 +82,21 @@ module.exports = class ConnectionPool { throw new Error(`Could not find a connection for origin: ${origin}`); } + // Make sure each origin has 6 connections available + // https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group" + while (connections.length < 6) connections.push(connections[0].clone()); + while (connections.length > 6) connections.pop(); + this._connectionsByOrigin.set(origin, connections); } } /** * @param {LH.WebInspector.NetworkRequest} record + * @param {{ignoreObserved?: boolean}} options * @return {?TcpConnection} */ - acquire(record) { + acquire(record, options = {}) { if (this._connectionsByRecord.has(record)) { // @ts-ignore return this._connectionsByRecord.get(record); @@ -99,16 +105,29 @@ module.exports = class ConnectionPool { const origin = String(record.origin); /** @type {TcpConnection[]} */ const connections = this._connectionsByOrigin.get(origin) || []; - const wasConnectionWarm = !!this._connectionReusedByRequestId.get(record.requestId); - const connection = connections.find(connection => { - const meetsWarmRequirement = wasConnectionWarm === connection.isWarm(); - return meetsWarmRequirement && !this._connectionsInUse.has(connection); + // Sort connections by decreasing congestion window, i.e. warmest to coldest + const sortedConnections = connections.sort((a, b) => b.congestionWindow - a.congestionWindow); + + const availableWarmConnection = sortedConnections.find(connection => { + return connection.isWarm() && !this._connectionsInUse.has(connection); + }); + const availableColdConnection = sortedConnections.find(connection => { + return !connection.isWarm() && !this._connectionsInUse.has(connection); }); - if (!connection) return null; - this._connectionsInUse.add(connection); - this._connectionsByRecord.set(record, connection); - return connection; + const needsColdConnection = !options.ignoreObserved && + !this._connectionReusedByRequestId.get(record.requestId); + const needsWarmConnection = !options.ignoreObserved && + this._connectionReusedByRequestId.get(record.requestId); + + let connectionToUse = availableWarmConnection || availableColdConnection; + if (needsColdConnection) connectionToUse = availableColdConnection; + if (needsWarmConnection) connectionToUse = availableWarmConnection; + if (!connectionToUse) return null; + + this._connectionsInUse.add(connectionToUse); + this._connectionsByRecord.set(record, connectionToUse); + return connectionToUse; } /** diff --git a/lighthouse-core/lib/dependency-graph/simulator/simulator.js b/lighthouse-core/lib/dependency-graph/simulator/simulator.js index 5b8f4d6230e4..70eb3c555165 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/simulator.js +++ b/lighthouse-core/lib/dependency-graph/simulator/simulator.js @@ -52,6 +52,7 @@ class Simulator { this._layoutTaskMultiplier = this._cpuSlowdownMultiplier * this._options.layoutTaskMultiplier; // Properties reset on every `.simulate` call but duplicated here for type checking + this._ignoreObserved = false; this._nodeTimings = new Map(); this._numberInProgressByType = new Map(); this._nodes = {}; @@ -147,6 +148,16 @@ class Simulator 
{ } } + /** + * @param {LH.WebInspector.NetworkRequest} record + * @return {?TcpConnection} + */ + _acquireConnection(record) { + return this._connectionPool.acquire(record, { + ignoreObserved: this._ignoreObserved, + }); + } + /** * @param {Node} node * @param {number} totalElapsedTime @@ -167,7 +178,7 @@ class Simulator { // Start a network request if we're not at max requests and a connection is available const numberOfActiveRequests = this._numberInProgress(node.type); if (numberOfActiveRequests >= this._maximumConcurrentRequests) return; - const connection = this._connectionPool.acquire(/** @type {NetworkNode} */ (node).record); + const connection = this._acquireConnection(/** @type {NetworkNode} */ (node).record); if (!connection) return; this._markNodeAsInProgress(node, totalElapsedTime); @@ -212,7 +223,7 @@ class Simulator { const record = /** @type {NetworkNode} */ (node).record; const timingData = this._nodeTimings.get(node); - const connection = /** @type {TcpConnection} */ (this._connectionPool.acquire(record)); + const connection = /** @type {TcpConnection} */ (this._acquireConnection(record)); const calculation = connection.simulateDownloadUntil( record.transferSize - timingData.bytesDownloaded, {timeAlreadyElapsed: timingData.timeElapsed, maximumTimeToElapse: Infinity} @@ -255,7 +266,7 @@ class Simulator { if (node.type !== Node.TYPES.NETWORK) throw new Error('Unsupported'); const record = /** @type {NetworkNode} */ (node).record; - const connection = /** @type {TcpConnection} */ (this._connectionPool.acquire(record)); + const connection = /** @type {TcpConnection} */ (this._acquireConnection(record)); const calculation = connection.simulateDownloadUntil( record.transferSize - timingData.bytesDownloaded, { @@ -281,10 +292,13 @@ class Simulator { /** * Estimates the time taken to process all of the graph's nodes. * @param {Node} graph + * @param {{ignoreObserved?: boolean}=} options * @return {LH.Gatherer.Simulation.Result} */ - simulate(graph) { + simulate(graph, options) { + options = Object.assign({ignoreObserved: false}, options); // initialize the necessary data containers + this._ignoreObserved = options.ignoreObserved; this._initializeConnectionPool(graph); this._initializeAuxiliaryData(); diff --git a/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js b/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js index 8a4c7c699a1f..7aa6017ea17d 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js +++ b/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js @@ -78,6 +78,20 @@ class TcpConnection { return this._warmed; } + /** + * @return {boolean} + */ + isH2() { + return this._h2; + } + + /** + * @return {number} + */ + get congestionWindow() { + return this._congestionWindow; + } + /** * Sets the number of excess bytes that are available to this connection on future downloads, only * applies to H2 connections. @@ -88,6 +102,14 @@ class TcpConnection { this._h2OverflowBytesDownloaded = bytes; } + /** + * @return {TcpConnection} + */ + clone() { + // @ts-ignore + return Object.assign(new TcpConnection(), this); + } + /** * Simulates a network download of a particular number of bytes over an optional maximum amount of time * and returns information about the ending state. 
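Note (editor's illustration, not part of the patch): taken together, the library changes above let the audit answer "what would have happened if these resources were preloaded?" by editing the dependency graph and re-running the lantern simulation. The sketch below condenses that idea. estimatePreloadSavingsMs and its parameters are hypothetical names, it mutates the graph in place instead of using cloneWithRelationships() plus the record re-mapping that the real computeWasteWithGraph performs, and it skips rounding and the savings threshold — but every call it makes (simulate with ignoreObserved, removeAllDependencies, addDependency, nodeTimings) is introduced in the diffs above.

// Sketch: estimate per-URL preload savings by re-parenting candidate nodes onto the
// main document node and comparing simulated end times before and after.
function estimatePreloadSavingsMs(graph, simulator, mainDocumentNode, nodesToPreload) {
  const before = simulator.simulate(graph, {ignoreObserved: true});

  // Preload makes the main HTML document the resource's only dependency, so the
  // request can start as soon as the document response is available.
  for (const node of nodesToPreload) {
    node.removeAllDependencies();
    node.addDependency(mainDocumentNode);
  }

  const after = simulator.simulate(graph, {ignoreObserved: true});
  return nodesToPreload.map(node => ({
    url: node.record.url,
    wastedMs: before.nodeTimings.get(node).endTime - after.nodeTimings.get(node).endTime,
  }));
}

Because preloading one resource does not necessarily shorten the deepest request chain, the audit reports the maximum per-URL improvement as the summary wastedMs rather than the change in overall load time.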
diff --git a/lighthouse-core/test/audits/uses-rel-preload-test.js b/lighthouse-core/test/audits/uses-rel-preload-test.js index ce86ed34cb0f..4137405e56f9 100644 --- a/lighthouse-core/test/audits/uses-rel-preload-test.js +++ b/lighthouse-core/test/audits/uses-rel-preload-test.js @@ -9,56 +9,105 @@ /* eslint-env mocha */ const UsesRelPreload = require('../../audits/uses-rel-preload.js'); +const NetworkNode = require('../../lib/dependency-graph/network-node'); const assert = require('assert'); const defaultMainResource = { _endTime: 1, }; -const mockArtifacts = (networkRecords, mockChain, mainResource = defaultMainResource) => { - return { - devtoolsLogs: { - [UsesRelPreload.DEFAULT_PASS]: [], - }, - requestCriticalRequestChains: () => { - return Promise.resolve(mockChain); - }, - requestNetworkRecords: () => networkRecords, - requestMainResource: () => { - return Promise.resolve(mainResource); - }, +describe('Performance: uses-rel-preload audit', () => { + let mockGraph; + let mockSimulator; + + const mockArtifacts = (networkRecords, mockChain, mainResource = defaultMainResource) => { + return { + traces: {[UsesRelPreload.DEFAULT_PASS]: {traceEvents: []}}, + devtoolsLogs: {[UsesRelPreload.DEFAULT_PASS]: []}, + requestCriticalRequestChains: () => { + return Promise.resolve(mockChain); + }, + requestLoadSimulator: () => mockSimulator, + requestPageDependencyGraph: () => mockGraph, + requestNetworkRecords: () => networkRecords, + requestMainResource: () => { + return Promise.resolve(mainResource); + }, + }; }; -}; -describe('Performance: uses-rel-preload audit', () => { - it(`should suggest preload resource`, () => { + function buildNode(requestId, url) { + return new NetworkNode({url, requestId}); + } + + it('should suggest preload resource', () => { + const rootNode = buildNode(1, 'http://example.com'); + const mainDocumentNode = buildNode(2, 'http://www.example.com'); + const scriptNode = buildNode(3, 'http://www.example.com/script.js'); + const scriptAddedNode = buildNode(4, 'http://www.example.com/script-added.js'); + + mainDocumentNode.addDependency(rootNode); + scriptNode.addDependency(mainDocumentNode); + scriptAddedNode.addDependency(scriptNode); + + mockGraph = rootNode; + mockSimulator = { + simulate(graph) { + const nodesByUrl = new Map(); + graph.traverse(node => nodesByUrl.set(node.record.url, node)); + + const rootNodeLocal = nodesByUrl.get(rootNode.record.url); + const mainDocumentNodeLocal = nodesByUrl.get(mainDocumentNode.record.url); + const scriptNodeLocal = nodesByUrl.get(scriptNode.record.url); + const scriptAddedNodeLocal = nodesByUrl.get(scriptAddedNode.record.url); + + const nodeTimings = new Map([ + [rootNodeLocal, {starTime: 0, endTime: 500}], + [mainDocumentNodeLocal, {startTime: 500, endTime: 1000}], + [scriptNodeLocal, {startTime: 1000, endTime: 2000}], + [scriptAddedNodeLocal, {startTime: 2000, endTime: 3250}], + ]); + + if (scriptAddedNodeLocal.getDependencies()[0] === mainDocumentNodeLocal) { + nodeTimings.set(scriptAddedNodeLocal, {startTime: 1000, endTime: 2000}); + } + + return {nodeTimings}; + }, + }; + const mainResource = Object.assign({}, defaultMainResource, { + url: 'http://www.example.com', redirects: [''], }); const networkRecords = [ { requestId: '2', - _endTime: 1, _isLinkPreload: false, _url: 'http://www.example.com', }, { requestId: '3', - _startTime: 10, - _endTime: 19, _isLinkPreload: false, _url: 'http://www.example.com/script.js', }, + { + requestId: '4', + _isLinkPreload: false, + _url: 'http://www.example.com/script-added.js', + }, ]; + 
const chains = { '1': { children: { '2': { + request: networkRecords[0], children: { '3': { - request: networkRecords[0], + request: networkRecords[1], children: { '4': { - request: networkRecords[1], + request: networkRecords[2], children: {}, }, }, @@ -69,11 +118,12 @@ describe('Performance: uses-rel-preload audit', () => { }, }; - return UsesRelPreload.audit(mockArtifacts(networkRecords, chains, mainResource)) - .then(output => { - assert.equal(output.rawValue, 9000); + return UsesRelPreload.audit(mockArtifacts(networkRecords, chains, mainResource), {}).then( + output => { + assert.equal(output.rawValue, 1250); assert.equal(output.details.items.length, 1); - }); + } + ); }); it(`shouldn't suggest preload for already preloaded records`, () => { @@ -100,7 +150,7 @@ describe('Performance: uses-rel-preload audit', () => { }, }; - return UsesRelPreload.audit(mockArtifacts(networkRecords, chains)).then(output => { + return UsesRelPreload.audit(mockArtifacts(networkRecords, chains), {}).then(output => { assert.equal(output.rawValue, 0); assert.equal(output.details.items.length, 0); }); @@ -130,7 +180,7 @@ describe('Performance: uses-rel-preload audit', () => { }, }; - return UsesRelPreload.audit(mockArtifacts(networkRecords, chains)).then(output => { + return UsesRelPreload.audit(mockArtifacts(networkRecords, chains), {}).then(output => { assert.equal(output.rawValue, 0); assert.equal(output.details.items.length, 0); }); diff --git a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js new file mode 100644 index 000000000000..4b62f9b257ce --- /dev/null +++ b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js @@ -0,0 +1,187 @@ +/** + * @license Copyright 2018 Google Inc. All Rights Reserved. + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
+ */ +'use strict'; + +const ConnectionPool = require('../../../../lib/dependency-graph/simulator/connection-pool'); + +const assert = require('assert'); +const URL = require('url').URL; + +/* eslint-env mocha */ +describe('DependencyGraph/Simulator/ConnectionPool', () => { + const rtt = 100; + const throughput = 10000 * 1024; + let requestId; + + function record(data = {}) { + const url = data.url || 'http://example.com'; + const origin = new URL(url).origin; + const scheme = url.split(':')[0]; + + return Object.assign({ + requestId: requestId++, + url, + origin, + protocol: 'http/1.1', + parsedURL: {scheme}, + }, data); + } + + beforeEach(() => { + requestId = 1; + }); + + describe('#constructor', () => { + it('should create the pool', () => { + const pool = new ConnectionPool([record()], {rtt, throughput}); + // Make sure 6 connections are created for each origin + assert.equal(pool._connectionsByOrigin.get('http://example.com').length, 6); + // Make sure it populates connectionWasReused + assert.equal(pool._connectionReusedByRequestId.get(1), false); + + const connection = pool._connectionsByOrigin.get('http://example.com')[0]; + assert.equal(connection._rtt, rtt); + assert.equal(connection._throughput, throughput); + assert.equal(connection._serverLatency, 30); // sets to default value + }); + + it('should set TLS properly', () => { + const recordA = record({url: 'https://example.com'}); + const pool = new ConnectionPool([recordA], {rtt, throughput}); + const connection = pool._connectionsByOrigin.get('https://example.com')[0]; + assert.ok(connection._ssl, 'should have set connection TLS'); + }); + + it('should set H2 properly', () => { + const recordA = record({protocol: 'h2'}); + const pool = new ConnectionPool([recordA], {rtt, throughput}); + const connection = pool._connectionsByOrigin.get('http://example.com')[0]; + assert.ok(connection.isH2(), 'should have set HTTP/2'); + }); + + it('should set origin-specific RTT properly', () => { + const additionalRttByOrigin = new Map([['http://example.com', 63]]); + const pool = new ConnectionPool([record()], {rtt, throughput, additionalRttByOrigin}); + const connection = pool._connectionsByOrigin.get('http://example.com')[0]; + assert.ok(connection._rtt, rtt + 63); + }); + + it('should set origin-specific server latency properly', () => { + const serverResponseTimeByOrigin = new Map([['http://example.com', 63]]); + const pool = new ConnectionPool([record()], {rtt, throughput, serverResponseTimeByOrigin}); + const connection = pool._connectionsByOrigin.get('http://example.com')[0]; + assert.ok(connection._serverLatency, 63); + }); + }); + + describe('.acquire', () => { + it('should remember the connection associated with each record', () => { + const recordA = record(); + const recordB = record(); + const pool = new ConnectionPool([recordA, recordB], {rtt, throughput}); + + const connectionForA = pool.acquire(recordA); + const connectionForB = pool.acquire(recordB); + for (let i = 0; i < 10; i++) { + assert.equal(pool.acquire(recordA), connectionForA); + assert.equal(pool.acquire(recordB), connectionForB); + } + + assert.deepStrictEqual(pool.connectionsInUse(), [connectionForA, connectionForB]); + }); + + it('should return null when available connections are exhausted', () => { + const records = new Array(7).fill(undefined, 0, 7).map(() => record()); + const pool = new ConnectionPool(records, {rtt, throughput}); + const connections = records.map(record => pool.acquire(record)); + assert.ok(connections[0], 'did not find connection for 1st 
record'); + assert.ok(connections[5], 'did not find connection for 6th record'); + assert.equal(connections[6], null); + }); + + it('should respect observed connection reuse', () => { + const coldRecord = record(); + const warmRecord = record(); + const pool = new ConnectionPool([coldRecord, warmRecord], {rtt, throughput}); + pool._connectionReusedByRequestId.set(warmRecord.requestId, true); + assert.ok(pool.acquire(coldRecord), 'should have acquired cold connection'); + assert.ok(!pool.acquire(warmRecord), 'should not have acquired connection'); + pool.release(coldRecord); + + for (const connection of pool._connectionsByOrigin.get('http://example.com')) { + connection.setWarmed(true); + } + + assert.ok(!pool.acquire(coldRecord), 'should not have acquired connection'); + assert.ok(pool.acquire(warmRecord), 'should have acquired warm connection'); + }); + + it('should ignore observed connection reuse when flag is present', () => { + const coldRecord = record(); + const warmRecord = record(); + const pool = new ConnectionPool([coldRecord, warmRecord], {rtt, throughput}); + pool._connectionReusedByRequestId.set(warmRecord.requestId, true); + + const opts = {ignoreObserved: true}; + assert.ok(pool.acquire(coldRecord, opts), 'should have acquired connection'); + assert.ok(pool.acquire(warmRecord, opts), 'should have acquired connection'); + pool.release(coldRecord); + + for (const connection of pool._connectionsByOrigin.get('http://example.com')) { + connection.setWarmed(true); + } + + assert.ok(pool.acquire(coldRecord, opts), 'should have acquired connection'); + assert.ok(pool.acquire(warmRecord, opts), 'should have acquired connection'); + }); + + it('should acquire in order of warmness', () => { + const recordA = record(); + const recordB = record(); + const recordC = record(); + const pool = new ConnectionPool([recordA, recordB, recordC], {rtt, throughput}); + pool._connectionReusedByRequestId.set(recordA.requestId, true); + pool._connectionReusedByRequestId.set(recordB.requestId, true); + pool._connectionReusedByRequestId.set(recordC.requestId, true); + + const [connectionWarm, connectionWarmer, connectionWarmest] = + pool._connectionsByOrigin.get('http://example.com'); + connectionWarm.setWarmed(true); + connectionWarm.setCongestionWindow(10); + connectionWarmer.setWarmed(true); + connectionWarmer.setCongestionWindow(100); + connectionWarmest.setWarmed(true); + connectionWarmest.setCongestionWindow(1000); + + assert.equal(pool.acquire(recordA), connectionWarmest); + assert.equal(pool.acquire(recordB), connectionWarmer); + assert.equal(pool.acquire(recordC), connectionWarm); + }); + }); + + describe('.release', () => { + it('noop for record without connection', () => { + const recordA = record(); + const pool = new ConnectionPool([recordA], {rtt, throughput}); + assert.equal(pool.release(recordA), undefined); + }); + + it('frees the connection for reissue', () => { + const records = new Array(7).fill(undefined, 0, 7).map(() => record()); + const pool = new ConnectionPool(records, {rtt, throughput}); + records.forEach(record => pool.acquire(record)); + + assert.equal(pool.connectionsInUse().length, 6); + assert.ok(!pool.acquire(records[6]), 'had connection that is in use'); + + pool.release(records[0]); + assert.equal(pool.connectionsInUse().length, 5); + + assert.ok(pool.acquire(records[6]), 'could not reissue released connection'); + assert.ok(!pool.acquire(records[0]), 'had connection that is in use'); + }); + }); +}); From 08fa322c87143aa6c64f3505f7017f3581b62cf7 Mon Sep 17 00:00:00 
2001 From: Patrick Hulce Date: Mon, 30 Apr 2018 17:25:13 -0700 Subject: [PATCH 2/6] rebase and expand tests --- .../simulator/connection-pool.js | 2 +- .../dependency-graph/simulator/simulator.js | 4 ++++ .../simulator/connection-pool-test.js | 23 +++++++++++++------ 3 files changed, 21 insertions(+), 8 deletions(-) diff --git a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js index 064cffeaa194..20ecd578f818 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js +++ b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js @@ -121,7 +121,7 @@ module.exports = class ConnectionPool { this._connectionReusedByRequestId.get(record.requestId); let connectionToUse = availableWarmConnection || availableColdConnection; - if (needsColdConnection) connectionToUse = availableColdConnection; + if (needsColdConnection) connectionToUse = availableColdConnection || availableWarmConnection; if (needsWarmConnection) connectionToUse = availableWarmConnection; if (!connectionToUse) return null; diff --git a/lighthouse-core/lib/dependency-graph/simulator/simulator.js b/lighthouse-core/lib/dependency-graph/simulator/simulator.js index 70eb3c555165..2397537cfd49 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/simulator.js +++ b/lighthouse-core/lib/dependency-graph/simulator/simulator.js @@ -322,6 +322,10 @@ class Simulator { this._startNodeIfPossible(node, totalElapsedTime); } + if (!nodesInProgress.size) { + throw new Error('Failed to start a node, potential mismatch in original execution'); + } + // set the available throughput for all connections based on # inflight this._updateNetworkCapacity(); diff --git a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js index 4b62f9b257ce..fc77384be623 100644 --- a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js +++ b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js @@ -24,9 +24,8 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { return Object.assign({ requestId: requestId++, url, - origin, protocol: 'http/1.1', - parsedURL: {scheme}, + parsedURL: {scheme, securityOrigin: () => origin}, }, data); } @@ -107,16 +106,26 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { const warmRecord = record(); const pool = new ConnectionPool([coldRecord, warmRecord], {rtt, throughput}); pool._connectionReusedByRequestId.set(warmRecord.requestId, true); - assert.ok(pool.acquire(coldRecord), 'should have acquired cold connection'); + + assert.ok(pool.acquire(coldRecord), 'should have acquired connection'); assert.ok(!pool.acquire(warmRecord), 'should not have acquired connection'); pool.release(coldRecord); - for (const connection of pool._connectionsByOrigin.get('http://example.com')) { + const connections = Array.from(pool._connectionsByOrigin.get('http://example.com')); + connections.forEach((connection, i) => { + connection.setWarmed(i % 2 === 0); + }); + + assert.equal(pool.acquire(coldRecord), connections[1], 'should have cold connection'); + assert.equal(pool.acquire(warmRecord), connections[0], 'should have warm connection'); + + connections.forEach(connection => { connection.setWarmed(true); - } + }); - assert.ok(!pool.acquire(coldRecord), 'should not have acquired connection'); - assert.ok(pool.acquire(warmRecord), 'should have acquired warm connection'); + // still 
allow a cold to receive a warm, just don't prefer it + assert.ok(pool.acquire(coldRecord), 'should have acquired connection'); + assert.ok(pool.acquire(warmRecord), 'should have acquired connection'); }); it('should ignore observed connection reuse when flag is present', () => { From 9d0f99542a29007712d4f727f131fc650763608c Mon Sep 17 00:00:00 2001 From: Patrick Hulce Date: Tue, 1 May 2018 18:44:51 -0700 Subject: [PATCH 3/6] feedback --- lighthouse-core/audits/uses-rel-preload.js | 4 +-- .../simulator/connection-pool.js | 32 ++++++++----------- .../dependency-graph/simulator/simulator.js | 10 +++--- .../simulator/connection-pool-test.js | 24 ++++++++++---- 4 files changed, 38 insertions(+), 32 deletions(-) diff --git a/lighthouse-core/audits/uses-rel-preload.js b/lighthouse-core/audits/uses-rel-preload.js index 8c96858a84d7..c0fdecd6ce83 100644 --- a/lighthouse-core/audits/uses-rel-preload.js +++ b/lighthouse-core/audits/uses-rel-preload.js @@ -63,7 +63,7 @@ class UsesRelPreloadAudit extends Audit { return {wastedMs: 0, results: []}; } - const simulationBeforeChanges = simulator.simulate(graph, {ignoreObserved: true}); + const simulationBeforeChanges = simulator.simulate(graph, {flexibleOrdering: true}); const modifiedGraph = graph.cloneWithRelationships(); @@ -92,7 +92,7 @@ class UsesRelPreloadAudit extends Audit { node.addDependency(mainDocumentNode); } - const simulationAfterChanges = simulator.simulate(modifiedGraph, {ignoreObserved: true}); + const simulationAfterChanges = simulator.simulate(modifiedGraph, {flexibleOrdering: true}); const originalNodesByRecord = Array.from(simulationBeforeChanges.nodeTimings.keys()) .reduce((map, node) => map.set(node.record, node), new Map()); diff --git a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js index 20ecd578f818..62366a568990 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js +++ b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js @@ -85,7 +85,6 @@ module.exports = class ConnectionPool { // Make sure each origin has 6 connections available // https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group" while (connections.length < 6) connections.push(connections[0].clone()); - while (connections.length > 6) connections.pop(); this._connectionsByOrigin.set(origin, connections); } @@ -93,7 +92,7 @@ module.exports = class ConnectionPool { /** * @param {LH.WebInspector.NetworkRequest} record - * @param {{ignoreObserved?: boolean}} options + * @param {{ignoreConnectionReused?: boolean}} options * @return {?TcpConnection} */ acquire(record, options = {}) { @@ -106,23 +105,20 @@ module.exports = class ConnectionPool { /** @type {TcpConnection[]} */ const connections = this._connectionsByOrigin.get(origin) || []; // Sort connections by decreasing congestion window, i.e. 
warmest to coldest - const sortedConnections = connections.sort((a, b) => b.congestionWindow - a.congestionWindow); - - const availableWarmConnection = sortedConnections.find(connection => { - return connection.isWarm() && !this._connectionsInUse.has(connection); - }); - const availableColdConnection = sortedConnections.find(connection => { - return !connection.isWarm() && !this._connectionsInUse.has(connection); - }); - - const needsColdConnection = !options.ignoreObserved && - !this._connectionReusedByRequestId.get(record.requestId); - const needsWarmConnection = !options.ignoreObserved && - this._connectionReusedByRequestId.get(record.requestId); + const availableConnections = connections + .filter(connection => !this._connectionsInUse.has(connection)) + .sort((a, b) => b.congestionWindow - a.congestionWindow); + + const observedConnectionWasReused = !!this._connectionReusedByRequestId.get(record.requestId); + + /** @type {TcpConnection|undefined} */ + let connectionToUse = availableConnections[0]; + if (!options.ignoreConnectionReused) { + connectionToUse = availableConnections.find( + connection => connection.isWarm() === observedConnectionWasReused + ); + } - let connectionToUse = availableWarmConnection || availableColdConnection; - if (needsColdConnection) connectionToUse = availableColdConnection || availableWarmConnection; - if (needsWarmConnection) connectionToUse = availableWarmConnection; if (!connectionToUse) return null; this._connectionsInUse.add(connectionToUse); diff --git a/lighthouse-core/lib/dependency-graph/simulator/simulator.js b/lighthouse-core/lib/dependency-graph/simulator/simulator.js index 2397537cfd49..124be075f597 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/simulator.js +++ b/lighthouse-core/lib/dependency-graph/simulator/simulator.js @@ -52,7 +52,7 @@ class Simulator { this._layoutTaskMultiplier = this._cpuSlowdownMultiplier * this._options.layoutTaskMultiplier; // Properties reset on every `.simulate` call but duplicated here for type checking - this._ignoreObserved = false; + this._flexibleOrdering = false; this._nodeTimings = new Map(); this._numberInProgressByType = new Map(); this._nodes = {}; @@ -154,7 +154,7 @@ class Simulator { */ _acquireConnection(record) { return this._connectionPool.acquire(record, { - ignoreObserved: this._ignoreObserved, + ignoreConnectionReused: this._flexibleOrdering, }); } @@ -292,13 +292,13 @@ class Simulator { /** * Estimates the time taken to process all of the graph's nodes. 
* @param {Node} graph - * @param {{ignoreObserved?: boolean}=} options + * @param {{flexibleOrdering?: boolean}=} options * @return {LH.Gatherer.Simulation.Result} */ simulate(graph, options) { - options = Object.assign({ignoreObserved: false}, options); + options = Object.assign({flexibleOrdering: false}, options); // initialize the necessary data containers - this._ignoreObserved = options.ignoreObserved; + this._flexibleOrdering = options.flexibleOrdering; this._initializeConnectionPool(graph); this._initializeAuxiliaryData(); diff --git a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js index fc77384be623..a3da1d45850e 100644 --- a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js +++ b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js @@ -92,16 +92,23 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { assert.deepStrictEqual(pool.connectionsInUse(), [connectionForA, connectionForB]); }); - it('should return null when available connections are exhausted', () => { + it('should allocate at least 6 connections', () => { + const pool = new ConnectionPool([record()], {rtt, throughput}); + for (let i = 0; i < 6; i++) { + assert.ok(pool.acquire(record()), `did not find connection for ${i}th record`); + } + }); + + it('should allocate all connections', () => { const records = new Array(7).fill(undefined, 0, 7).map(() => record()); const pool = new ConnectionPool(records, {rtt, throughput}); const connections = records.map(record => pool.acquire(record)); assert.ok(connections[0], 'did not find connection for 1st record'); assert.ok(connections[5], 'did not find connection for 6th record'); - assert.equal(connections[6], null); + assert.ok(connections[6], 'did not find connection for 7th record'); }); - it('should respect observed connection reuse', () => { + it.only('should respect observed connection reuse', () => { const coldRecord = record(); const warmRecord = record(); const pool = new ConnectionPool([coldRecord, warmRecord], {rtt, throughput}); @@ -118,13 +125,14 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { assert.equal(pool.acquire(coldRecord), connections[1], 'should have cold connection'); assert.equal(pool.acquire(warmRecord), connections[0], 'should have warm connection'); + pool.release(coldRecord); + pool.release(warmRecord); connections.forEach(connection => { connection.setWarmed(true); }); - // still allow a cold to receive a warm, just don't prefer it - assert.ok(pool.acquire(coldRecord), 'should have acquired connection'); + assert.ok(!pool.acquire(coldRecord), 'should not have acquired connection'); assert.ok(pool.acquire(warmRecord), 'should have acquired connection'); }); @@ -134,7 +142,7 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { const pool = new ConnectionPool([coldRecord, warmRecord], {rtt, throughput}); pool._connectionReusedByRequestId.set(warmRecord.requestId, true); - const opts = {ignoreObserved: true}; + const opts = {ignoreConnectionReused: true}; assert.ok(pool.acquire(coldRecord, opts), 'should have acquired connection'); assert.ok(pool.acquire(warmRecord, opts), 'should have acquired connection'); pool.release(coldRecord); @@ -179,8 +187,10 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { }); it('frees the connection for reissue', () => { - const records = new Array(7).fill(undefined, 0, 7).map(() => record()); + const records = new 
Array(6).fill(undefined, 0, 7).map(() => record()); const pool = new ConnectionPool(records, {rtt, throughput}); + records.push(record()); + records.forEach(record => pool.acquire(record)); assert.equal(pool.connectionsInUse().length, 6); From 0dff54d12342bd0136ee60cc0a4e654f0e162ff1 Mon Sep 17 00:00:00 2001 From: Patrick Hulce Date: Tue, 1 May 2018 19:23:51 -0700 Subject: [PATCH 4/6] add comment --- .../lib/dependency-graph/simulator/connection-pool.js | 7 +++++++ .../lib/dependency-graph/simulator/connection-pool-test.js | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js index 62366a568990..c49080c04ca5 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js +++ b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js @@ -91,6 +91,13 @@ module.exports = class ConnectionPool { } /** + * This method finds an available connection to the origin specified by the network record or null + * if no connection was available. If returned, connection will not be available for other network + * records until release is called. + * + * If ignoreConnectionReused is true, acquire will consider all connections not in use as available. + * Otherwise, only connections that have matching "warmth" are considered available. + * * @param {LH.WebInspector.NetworkRequest} record * @param {{ignoreConnectionReused?: boolean}} options * @return {?TcpConnection} diff --git a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js index a3da1d45850e..2947f3e7e27e 100644 --- a/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js +++ b/lighthouse-core/test/lib/dependency-graph/simulator/connection-pool-test.js @@ -108,7 +108,7 @@ describe('DependencyGraph/Simulator/ConnectionPool', () => { assert.ok(connections[6], 'did not find connection for 7th record'); }); - it.only('should respect observed connection reuse', () => { + it('should respect observed connection reuse', () => { const coldRecord = record(); const warmRecord = record(); const pool = new ConnectionPool([coldRecord, warmRecord], {rtt, throughput}); From 1b3c2a794bd317a64de66db877a28cea84d20a2b Mon Sep 17 00:00:00 2001 From: Patrick Hulce Date: Wed, 2 May 2018 14:58:43 -0700 Subject: [PATCH 5/6] feedback --- lighthouse-core/audits/uses-rel-preload.js | 5 +++++ .../lib/dependency-graph/simulator/connection-pool.js | 9 ++++++--- .../lib/dependency-graph/simulator/simulator.js | 11 ++++++++++- .../lib/dependency-graph/simulator/tcp-connection.js | 3 +-- lighthouse-core/test/audits/uses-rel-preload-test.js | 6 +++++- 5 files changed, 27 insertions(+), 7 deletions(-) diff --git a/lighthouse-core/audits/uses-rel-preload.js b/lighthouse-core/audits/uses-rel-preload.js index c0fdecd6ce83..17dee2a9cfba 100644 --- a/lighthouse-core/audits/uses-rel-preload.js +++ b/lighthouse-core/audits/uses-rel-preload.js @@ -63,6 +63,8 @@ class UsesRelPreloadAudit extends Audit { return {wastedMs: 0, results: []}; } + // Preload changes the ordering of requests, simulate the original graph with flexible ordering + // to have a reasonable baseline for comparison. 
const simulationBeforeChanges = simulator.simulate(graph, {flexibleOrdering: true}); const modifiedGraph = graph.cloneWithRelationships(); @@ -92,6 +94,7 @@ class UsesRelPreloadAudit extends Audit { node.addDependency(mainDocumentNode); } + // Once we've modified the dependencies, simulate the new graph with flexible ordering. const simulationAfterChanges = simulator.simulate(modifiedGraph, {flexibleOrdering: true}); const originalNodesByRecord = Array.from(simulationBeforeChanges.nodeTimings.keys()) .reduce((map, node) => map.set(node.record, node), new Map()); @@ -140,6 +143,8 @@ class UsesRelPreloadAudit extends Audit { const criticalRequests = UsesRelPreloadAudit._flattenRequests(critChains, 3 + mainResourceIndex, 2 + mainResourceIndex); + + /** @type {Set} */ const urls = new Set(); for (const networkRecord of criticalRequests) { if (!networkRecord._isLinkPreload && networkRecord.protocol !== 'data') { diff --git a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js index c49080c04ca5..5483ae7f3ac0 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js +++ b/lighthouse-core/lib/dependency-graph/simulator/connection-pool.js @@ -11,6 +11,10 @@ const TcpConnection = require('./tcp-connection'); const DEFAULT_SERVER_RESPONSE_TIME = 30; const TLS_SCHEMES = ['https', 'wss']; +// Each origin can have 6 simulatenous connections open +// https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group" +const CONNECTIONS_PER_ORIGIN = 6; + module.exports = class ConnectionPool { /** * @param {LH.WebInspector.NetworkRequest[]} records @@ -82,9 +86,8 @@ module.exports = class ConnectionPool { throw new Error(`Could not find a connection for origin: ${origin}`); } - // Make sure each origin has 6 connections available - // https://cs.chromium.org/chromium/src/net/socket/client_socket_pool_manager.cc?type=cs&q="int+g_max_sockets_per_group" - while (connections.length < 6) connections.push(connections[0].clone()); + // Make sure each origin has minimum number of connections available for max throughput + while (connections.length < CONNECTIONS_PER_ORIGIN) connections.push(connections[0].clone()); this._connectionsByOrigin.set(origin, connections); } diff --git a/lighthouse-core/lib/dependency-graph/simulator/simulator.js b/lighthouse-core/lib/dependency-graph/simulator/simulator.js index 124be075f597..94106543b5b7 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/simulator.js +++ b/lighthouse-core/lib/dependency-graph/simulator/simulator.js @@ -223,6 +223,7 @@ class Simulator { const record = /** @type {NetworkNode} */ (node).record; const timingData = this._nodeTimings.get(node); + // If we're estimating time remaining, we already acquired a connection for this record, definitely non-null const connection = /** @type {TcpConnection} */ (this._acquireConnection(record)); const calculation = connection.simulateDownloadUntil( record.transferSize - timingData.bytesDownloaded, @@ -266,6 +267,7 @@ class Simulator { if (node.type !== Node.TYPES.NETWORK) throw new Error('Unsupported'); const record = /** @type {NetworkNode} */ (node).record; + // If we're updating the progress, we already acquired a connection for this record, definitely non-null const connection = /** @type {TcpConnection} */ (this._acquireConnection(record)); const calculation = connection.simulateDownloadUntil( record.transferSize - timingData.bytesDownloaded, @@ -290,7 
+292,14 @@ class Simulator { } /** - * Estimates the time taken to process all of the graph's nodes. + * Estimates the time taken to process all of the graph's nodes, returns the overall time along with + * each node annotated by start/end times. + * + * If flexibleOrdering is set, simulator/connection pool are allowed to deviate from what was + * observed in the trace/devtoolsLog and start requests as soon as they are queued (i.e. do not + * wait around for a warm connection to be available if the original record was fetched on a warm + * connection). + * * @param {Node} graph * @param {{flexibleOrdering?: boolean}=} options * @return {LH.Gatherer.Simulation.Result} diff --git a/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js b/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js index 7aa6017ea17d..56c41c0c2297 100644 --- a/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js +++ b/lighthouse-core/lib/dependency-graph/simulator/tcp-connection.js @@ -106,8 +106,7 @@ class TcpConnection { * @return {TcpConnection} */ clone() { - // @ts-ignore - return Object.assign(new TcpConnection(), this); + return Object.assign(new TcpConnection(this._rtt, this._throughput), this); } /** diff --git a/lighthouse-core/test/audits/uses-rel-preload-test.js b/lighthouse-core/test/audits/uses-rel-preload-test.js index 4137405e56f9..294d74998698 100644 --- a/lighthouse-core/test/audits/uses-rel-preload-test.js +++ b/lighthouse-core/test/audits/uses-rel-preload-test.js @@ -39,6 +39,10 @@ describe('Performance: uses-rel-preload audit', () => { return new NetworkNode({url, requestId}); } + afterEach(() => { + mockSimulator = undefined; + }); + it('should suggest preload resource', () => { const rootNode = buildNode(1, 'http://example.com'); const mainDocumentNode = buildNode(2, 'http://www.example.com'); @@ -71,7 +75,7 @@ describe('Performance: uses-rel-preload audit', () => { nodeTimings.set(scriptAddedNodeLocal, {startTime: 1000, endTime: 2000}); } - return {nodeTimings}; + return {timeInMs: 3250, nodeTimings}; }, }; From 2ec133c33b03a53c675009802ec47749149bc6d2 Mon Sep 17 00:00:00 2001 From: Patrick Hulce Date: Wed, 2 May 2018 17:05:49 -0700 Subject: [PATCH 6/6] weak smoke test --- .../test/audits/uses-rel-preload-test.js | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/lighthouse-core/test/audits/uses-rel-preload-test.js b/lighthouse-core/test/audits/uses-rel-preload-test.js index 294d74998698..d96db4e75cb4 100644 --- a/lighthouse-core/test/audits/uses-rel-preload-test.js +++ b/lighthouse-core/test/audits/uses-rel-preload-test.js @@ -11,6 +11,11 @@ const UsesRelPreload = require('../../audits/uses-rel-preload.js'); const NetworkNode = require('../../lib/dependency-graph/network-node'); const assert = require('assert'); + +const Runner = require('../../runner'); +const pwaTrace = require('../fixtures/traces/progressive-app-m60.json'); +const pwaDevtoolsLog = require('../fixtures/traces/progressive-app-m60.devtools.log.json'); + const defaultMainResource = { _endTime: 1, }; @@ -189,4 +194,21 @@ describe('Performance: uses-rel-preload audit', () => { assert.equal(output.details.items.length, 0); }); }); + + it('does no throw on a real trace/devtools log', async () => { + const artifacts = Object.assign({ + URL: {finalUrl: 'https://pwa.rocks/'}, + traces: { + [UsesRelPreload.DEFAULT_PASS]: pwaTrace, + }, + devtoolsLogs: { + [UsesRelPreload.DEFAULT_PASS]: pwaDevtoolsLog, + }, + }, Runner.instantiateComputedArtifacts()); + + const settings = 
{throttlingMethod: 'provided'}; + const result = await UsesRelPreload.audit(artifacts, {settings}); + assert.equal(result.score, 1); + assert.equal(result.rawValue, 0); + }); });
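Note (editor's sketch, not part of the series): the smoke test above can also be reproduced as a small standalone script, which is a convenient way to poke at the lantern-based estimate while reviewing. The require paths assume the script sits at the repository root; the artifact wiring, fixture files, and expected outcome (score 1, rawValue 0 for this trace) are taken directly from the test.

// Standalone sanity check mirroring the new smoke test that runs the audit on a real trace.
const Runner = require('./lighthouse-core/runner');
const UsesRelPreload = require('./lighthouse-core/audits/uses-rel-preload.js');
const pwaTrace = require('./lighthouse-core/test/fixtures/traces/progressive-app-m60.json');
const pwaDevtoolsLog = require('./lighthouse-core/test/fixtures/traces/progressive-app-m60.devtools.log.json');

async function main() {
  // Same artifact shape as the smoke test: real trace/devtools log plus computed artifacts.
  const artifacts = Object.assign({
    URL: {finalUrl: 'https://pwa.rocks/'},
    traces: {[UsesRelPreload.DEFAULT_PASS]: pwaTrace},
    devtoolsLogs: {[UsesRelPreload.DEFAULT_PASS]: pwaDevtoolsLog},
  }, Runner.instantiateComputedArtifacts());

  const settings = {throttlingMethod: 'provided'};
  const result = await UsesRelPreload.audit(artifacts, {settings});
  console.log(result.score, result.rawValue); // expected: 1 0 (no preload savings for this page)
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});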