tests: update many test traces, support .json.gz #16007

Merged · 10 commits · May 30, 2024
4 changes: 2 additions & 2 deletions core/computed/metrics/interactive.js
@@ -187,6 +187,6 @@ export {InteractiveComputed as Interactive};
 
 /**
  * @typedef TimePeriod
- * @property {number} start
- * @property {number} end
+ * @property {number} start In ms.
+ * @property {number} end In ms.
  */
6 changes: 3 additions & 3 deletions core/gather/driver/network-monitor.js
@@ -202,6 +202,7 @@ class NetworkMonitor extends NetworkMonitorEventEmitter {
   /**
    * Finds all time periods where the number of inflight requests is less than or equal to the
    * number of allowed concurrent requests.
+   * The time periods returned are in ms.
    * @param {Array<LH.Artifacts.NetworkRequest>} requests
    * @param {number} allowedConcurrentRequests
    * @param {number=} endTime
@@ -215,10 +216,9 @@ class NetworkMonitor extends NetworkMonitorEventEmitter {
       if (UrlUtils.isNonNetworkProtocol(request.protocol)) return;
       if (request.protocol === 'ws' || request.protocol === 'wss') return;
 
-      // convert the network timestamp to ms
-      timeBoundaries.push({time: request.networkRequestTime * 1000, isStart: true});
+      timeBoundaries.push({time: request.networkRequestTime, isStart: true});
       if (request.finished) {
-        timeBoundaries.push({time: request.networkEndTime * 1000, isStart: false});
+        timeBoundaries.push({time: request.networkEndTime, isStart: false});
       }
     });
 
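The deleted `* 1000` conversions reflect that `networkRequestTime` and `networkEndTime` now arrive already in ms, so the quiet-period computation runs in ms end to end. For context, the finder whose JSDoc is updated above flattens request starts and ends into sorted time boundaries and scans them while tracking the inflight count. A minimal sketch of that technique, with illustrative names rather than the actual NetworkMonitor internals:

```js
/**
 * Sketch: find periods where inflight requests <= allowed. All times in ms.
 * @param {Array<{networkRequestTime: number, networkEndTime: number}>} requests
 * @param {number} allowed
 */
function findQuietPeriods(requests, allowed) {
  const boundaries = [];
  for (const r of requests) {
    boundaries.push({time: r.networkRequestTime, isStart: true});
    boundaries.push({time: r.networkEndTime, isStart: false});
  }
  boundaries.sort((a, b) => a.time - b.time);

  const periods = [];
  let inflight = 0;
  let quietStart = 0;
  for (const b of boundaries) {
    inflight += b.isStart ? 1 : -1;
    if (b.isStart && inflight === allowed + 1) {
      // Just went over the limit: the current quiet period ends here.
      periods.push({start: quietStart, end: b.time});
    } else if (!b.isStart && inflight === allowed) {
      // Just dropped back to the limit: a new quiet period begins.
      quietStart = b.time;
    }
  }
  if (inflight <= allowed) periods.push({start: quietStart, end: Infinity});
  return periods;
}
```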
103 changes: 75 additions & 28 deletions core/lib/asset-saver.js
@@ -8,6 +8,7 @@
 import path from 'path';
 import stream from 'stream';
 import url from 'url';
+import {createGzip, gunzipSync} from 'zlib';
 
 import log from 'lighthouse-logger';
 
@@ -33,6 +34,46 @@
  * @property {LH.DevtoolsLog} [devtoolsLog]
  */
 
+/**
+ * @param {import('stream').PipelineSource<any>} contents
+ * @param {string} path
+ * @param {boolean} gzip
+ */
+async function writeJson(contents, path, gzip) {
+  const writeStream = fs.createWriteStream(gzip ? path + '.gz' : path);
+  if (gzip) {
+    await stream.promises.pipeline(contents, createGzip(), writeStream);
+  } else {
+    await stream.promises.pipeline(contents, writeStream);
+  }
+}
+
+/**
+ * Prefers reading a gzipped file (.gz) if present.
+ * @param {string} filename
+ * @param {(this: any, key: string, value: any) => any=} reviver
+ */
+function readJson(filename, reviver) {
+  if (fs.existsSync(filename + '.gz')) {
+    filename = filename + '.gz';
+  }
+
+  if (!filename.endsWith('.json.gz')) {
+    return JSON.parse(fs.readFileSync(filename, 'utf8'), reviver);
+  }
+
+  const buffer = gunzipSync(fs.readFileSync(filename));
+  return JSON.parse(buffer.toString('utf8'), reviver);
+}
+
+/**
+ * @param {string} filename
+ * @param {string} suffix
+ * @returns
+ */
+function endsWithSuffix(filename, suffix) {
+  return filename.endsWith(suffix) || filename.endsWith(suffix + '.gz');
+}
+
 /**
  * Load artifacts object from files located within basePath
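Taken together, these helpers make compression transparent to callers: `writeJson` appends `.gz` and pipes through `createGzip()` when asked, and `readJson` silently prefers a `.gz` sibling if one exists on disk. A quick usage sketch (the path and payload are illustrative only):

```js
const payload = {traceEvents: [{name: 'navigationStart', ts: 0}]};

// Write gzipped JSON; the helper appends the .gz extension itself.
await writeJson(function* () {
  yield JSON.stringify(payload, null, 2);
  yield '\n';
}, '/tmp/example.json', /* gzip */ true); // writes /tmp/example.json.gz

// Read it back without naming the .gz file explicitly.
const back = readJson('/tmp/example.json'); // finds and gunzips the .gz sibling
console.log(back.traceEvents.length); // 1
```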
@@ -48,16 +89,15 @@
   }
 
   // load artifacts.json using a reviver to deserialize any LighthouseErrors in artifacts.
-  const artifactsStr = fs.readFileSync(path.join(basePath, artifactsFilename), 'utf8');
   /** @type {LH.Artifacts} */
-  const artifacts = JSON.parse(artifactsStr, LighthouseError.parseReviver);
+  const artifacts = readJson(path.join(basePath, artifactsFilename), LighthouseError.parseReviver);
 
   const filenames = fs.readdirSync(basePath);
 
-  filenames.filter(f => f.endsWith(devtoolsLogSuffix)).forEach(filename => {
+  filenames.filter(f => endsWithSuffix(f, devtoolsLogSuffix)).forEach(filename => {
     if (!artifacts.devtoolsLogs) artifacts.devtoolsLogs = {};
-    const prefix = filename.replace(devtoolsLogSuffix, '');
-    const devtoolsLog = JSON.parse(fs.readFileSync(path.join(basePath, filename), 'utf8'));
+    const prefix = filename.replace(devtoolsLogSuffix + '.gz', '').replace(devtoolsLogSuffix, '');
+    const devtoolsLog = readJson(path.join(basePath, filename));
     artifacts.devtoolsLogs[prefix] = devtoolsLog;
     if (prefix === defaultPrefix) {
       artifacts.DevtoolsLog = devtoolsLog;
@@ -67,11 +107,10 @@
     }
   });
 
-  filenames.filter(f => f.endsWith(traceSuffix)).forEach(filename => {
+  filenames.filter(f => endsWithSuffix(f, traceSuffix)).forEach(filename => {
     if (!artifacts.traces) artifacts.traces = {};
-    const file = fs.readFileSync(path.join(basePath, filename), {encoding: 'utf-8'});
-    const trace = JSON.parse(file);
-    const prefix = filename.replace(traceSuffix, '');
+    const trace = readJson(path.join(basePath, filename));
+    const prefix = filename.replace(traceSuffix + '.gz', '').replace(traceSuffix, '');
     artifacts.traces[prefix] = Array.isArray(trace) ? {traceEvents: trace} : trace;
     if (prefix === defaultPrefix) {
       artifacts.Trace = artifacts.traces[prefix];
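The chained `replace` is what lets both naming schemes resolve to the same pass prefix. A small worked example, assuming Lighthouse's usual `defaultPass` prefix and `.trace.json` suffix:

```js
const traceSuffix = '.trace.json';

// Compressed and uncompressed filenames yield the same prefix.
for (const name of ['defaultPass.trace.json', 'defaultPass.trace.json.gz']) {
  // Strip the gzipped suffix first so the plain replace can't leave '.gz' behind.
  const prefix = name.replace(traceSuffix + '.gz', '').replace(traceSuffix, '');
  console.log(prefix); // 'defaultPass' in both cases
}
```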
@@ -200,21 +239,25 @@
 
 /**
  * Save artifacts object mostly to single file located at basePath/artifacts.json.
- * Also save the traces & devtoolsLogs to their own files
+ * Also save the traces & devtoolsLogs to their own files, with optional compression.
  * @param {LH.Artifacts} artifacts
  * @param {string} basePath
+ * @param {{gzip?: boolean}} options
  * @return {Promise<void>}
  */
-async function saveArtifacts(artifacts, basePath) {
+async function saveArtifacts(artifacts, basePath, options = {}) {
   const status = {msg: 'Saving artifacts', id: 'lh:assetSaver:saveArtifacts'};
   log.time(status);
   fs.mkdirSync(basePath, {recursive: true});
 
   // Delete any previous artifacts in this directory.
   const filenames = fs.readdirSync(basePath);
   for (const filename of filenames) {
-    if (filename.endsWith(traceSuffix) || filename.endsWith(devtoolsLogSuffix) ||
-        filename === artifactsFilename) {
+    const isPreviousFile =
+      filename.endsWith(traceSuffix) || filename.endsWith(devtoolsLogSuffix) ||
+      filename.endsWith(traceSuffix + '.gz') || filename.endsWith(devtoolsLogSuffix + '.gz') ||
+      filename === artifactsFilename || filename === artifactsFilename + '.gz';
+    if (isPreviousFile) {
       fs.unlinkSync(`${basePath}/${filename}`);
     }
   }
@@ -234,24 +277,30 @@
   } = artifacts;
 
   if (Trace) {
-    await saveTrace(Trace, `${basePath}/${defaultPrefix}${traceSuffix}`);
+    await saveTrace(Trace, `${basePath}/${defaultPrefix}${traceSuffix}`, options);
   }
 
   if (TraceError) {
-    await saveTrace(TraceError, `${basePath}/${errorPrefix}${traceSuffix}`);
+    await saveTrace(TraceError, `${basePath}/${errorPrefix}${traceSuffix}`, options);
   }
 
   if (DevtoolsLog) {
-    await saveDevtoolsLog(DevtoolsLog, `${basePath}/${defaultPrefix}${devtoolsLogSuffix}`);
+    await saveDevtoolsLog(
+      DevtoolsLog, `${basePath}/${defaultPrefix}${devtoolsLogSuffix}`, options);
   }
 
   if (DevtoolsLogError) {
-    await saveDevtoolsLog(DevtoolsLogError, `${basePath}/${errorPrefix}${devtoolsLogSuffix}`);
+    await saveDevtoolsLog(
+      DevtoolsLogError, `${basePath}/${errorPrefix}${devtoolsLogSuffix}`, options);
   }
 
   // save everything else, using a replacer to serialize LighthouseErrors in the artifacts.
-  const restArtifactsString = JSON.stringify(restArtifacts, stringifyReplacer, 2) + '\n';
-  fs.writeFileSync(`${basePath}/${artifactsFilename}`, restArtifactsString, 'utf8');
+  const restArtifactsString = JSON.stringify(restArtifacts, stringifyReplacer, 2);
+  await writeJson(function* () {
+    yield restArtifactsString;
+    yield '\n';
+  }, `${basePath}/${artifactsFilename}`, !!options.gzip);
 
   log.log('Artifacts saved to disk in folder:', basePath);
   log.timeEnd(status);
 }
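With `options` threaded through every save path, compression becomes a single opt-in flag at the top level, while loading needs no flag at all since `readJson` auto-detects the `.gz` files. A sketch of the intended round trip (the import path and output directory are illustrative):

```js
import * as assetSaver from './core/lib/asset-saver.js';

// `artifacts` is assumed to be an LH.Artifacts object from a previous run.
// Emits artifacts.json.gz, defaultPass.trace.json.gz, etc., in one directory.
await assetSaver.saveArtifacts(artifacts, '/tmp/lh-artifacts', {gzip: true});

// No flag on the way back in: gzipped files are preferred when present.
const loaded = await assetSaver.loadArtifacts('/tmp/lh-artifacts');
```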
@@ -371,28 +420,26 @@
  * Save a trace as JSON by streaming to disk at traceFilename.
  * @param {LH.Trace} traceData
  * @param {string} traceFilename
+ * @param {{gzip?: boolean}=} options
  * @return {Promise<void>}
  */
-async function saveTrace(traceData, traceFilename) {
+function saveTrace(traceData, traceFilename, options = {}) {
   const traceIter = traceJsonGenerator(traceData);
-  const writeStream = fs.createWriteStream(traceFilename);
-
-  return stream.promises.pipeline(traceIter, writeStream);
+  return writeJson(traceIter, traceFilename, !!options.gzip);
 }
 
 /**
  * Save a devtoolsLog as JSON by streaming to disk at devtoolLogFilename.
  * @param {LH.DevtoolsLog} devtoolsLog
  * @param {string} devtoolLogFilename
+ * @param {{gzip?: boolean}=} options
  * @return {Promise<void>}
  */
-function saveDevtoolsLog(devtoolsLog, devtoolLogFilename) {
-  const writeStream = fs.createWriteStream(devtoolLogFilename);
-
-  return stream.promises.pipeline(function* () {
+function saveDevtoolsLog(devtoolsLog, devtoolLogFilename, options = {}) {
+  return writeJson(function* () {
     yield* arrayOfObjectsJsonGenerator(devtoolsLog);
     yield '\n';
-  }, writeStream);
+  }, devtoolLogFilename, !!options.gzip);
 }
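Both savers hand `writeJson` a generator rather than one pre-built string, so a very large trace streams to disk (optionally through gzip) chunk by chunk instead of being materialized as a single giant string. A minimal sketch of that generator pattern (`arrayJsonChunks` is illustrative, not the real `arrayOfObjectsJsonGenerator`):

```js
/**
 * Yields a large array as JSON text in small chunks, one element at a time.
 * @param {Array<unknown>} items
 */
function* arrayJsonChunks(items) {
  yield '[';
  for (let i = 0; i < items.length; i++) {
    // Stringify each element individually; peak memory stays near one element.
    yield (i === 0 ? '\n' : ',\n') + JSON.stringify(items[i]);
  }
  yield '\n]';
}

// A generator plugs straight into writeJson / stream.promises.pipeline.
// `someHugeEventArray` is an assumed stand-in for a real devtools log.
await writeJson(arrayJsonChunks(someHugeEventArray), 'devtoolslog.json', true);
```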
56 changes: 5 additions & 51 deletions core/test/audits/byte-efficiency/duplicated-javascript-test.js
@@ -4,16 +4,10 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
+import {LH_ROOT} from '../../../../shared/root.js';
 import DuplicatedJavascript from '../../../audits/byte-efficiency/duplicated-javascript.js';
-import {
-  loadSourceMapFixture,
-  createScript,
-  getURLArtifactFromDevtoolsLog,
-  readJson,
-} from '../../test-utils.js';
-
-const trace = readJson('../../fixtures/traces/lcp-m78.json', import.meta);
-const devtoolsLog = readJson('../../fixtures/traces/lcp-m78.devtools.log.json', import.meta);
+import {loadArtifacts} from '../../../lib/asset-saver.js';
+import {loadSourceMapFixture, createScript} from '../../test-utils.js';
 
 describe('DuplicatedJavascript computed artifact', () => {
   it('works (simple)', async () => {
@@ -319,54 +313,14 @@ describe('DuplicatedJavascript computed artifact', () => {
   });
 
   it('.audit', async () => {
-    // Use a real trace fixture, but the bundle stuff.
-    // Note: this mixing of data from different sources makes the exact results
-    // of this audit pretty meaningless. The important part of this test is that
-    // `wastedBytesByUrl` is functioning.
-    const bundleData1 = loadSourceMapFixture('coursehero-bundle-1');
-    const bundleData2 = loadSourceMapFixture('coursehero-bundle-2');
-    const artifacts = {
-      URL: getURLArtifactFromDevtoolsLog(devtoolsLog),
-      GatherContext: {gatherMode: 'navigation'},
-      devtoolsLogs: {
-        [DuplicatedJavascript.DEFAULT_PASS]: devtoolsLog,
-      },
-      traces: {
-        [DuplicatedJavascript.DEFAULT_PASS]: trace,
-      },
-      SourceMaps: [
-        {
-          scriptId: '1',
-          scriptUrl: 'https://www.paulirish.com/javascripts/firebase-performance.js',
-          map: bundleData1.map,
-        },
-        {
-          scriptId: '2',
-          scriptUrl: 'https://www.paulirish.com/javascripts/firebase-app.js',
-          map: bundleData2.map,
-        },
-      ],
-      Scripts: [
-        {
-          scriptId: '1',
-          url: 'https://www.paulirish.com/javascripts/firebase-performance.js',
-          content: bundleData1.content,
-        },
-        {
-          scriptId: '2',
-          url: 'https://www.paulirish.com/javascripts/firebase-app.js',
-          content: bundleData2.content,
-        },
-      ].map(createScript),
-    };
-
+    const artifacts = await loadArtifacts(`${LH_ROOT}/core/test/fixtures/artifacts/cnn`);
     const ultraSlowThrottling = {rttMs: 150, throughputKbps: 100, cpuSlowdownMultiplier: 8};
     const settings = {throttlingMethod: 'simulate', throttling: ultraSlowThrottling};
     const context = {settings, computedCache: new Map()};
     const results = await DuplicatedJavascript.audit(artifacts, context);
 
     // Without the `wastedBytesByUrl` this would be zero because the items don't define a url.
-    expect(results.details.overallSavingsMs).toBe(1370);
+    expect(results.details.overallSavingsMs).toBe(160);
   });
 
   it('_getNodeModuleName', () => {
core/test/audits/byte-efficiency/render-blocking-resources-test.js
@@ -33,8 +33,8 @@ describe('Render blocking resources audit', () => {
     const computedCache = new Map();
     const result = await RenderBlockingResourcesAudit.audit(artifacts, {settings, computedCache});
     assert.equal(result.score, 0);
-    assert.equal(result.numericValue, 304);
-    assert.deepStrictEqual(result.metricSavings, {FCP: 304, LCP: 0});
+    assert.equal(result.numericValue, 300);
+    assert.deepStrictEqual(result.metricSavings, {FCP: 300, LCP: 0});
   });
 
   it('evaluates correct wastedMs when LCP is text', async () => {
@@ -59,7 +59,7 @@
     const settings = {throttlingMethod: 'simulate', throttling: mobileSlow4G};
     const computedCache = new Map();
     const result = await RenderBlockingResourcesAudit.audit(artifacts, {settings, computedCache});
-    assert.deepStrictEqual(result.metricSavings, {FCP: 304, LCP: 304});
+    assert.deepStrictEqual(result.metricSavings, {FCP: 300, LCP: 300});
   });
 
   it('evaluates amp page correctly', async () => {
@@ -86,14 +86,14 @@
     expect(result.details.items).toEqual([
       {
         totalBytes: 389629,
-        url: 'http://localhost:57822/style.css',
+        url: 'http://localhost:50049/style.css',
         // This value would be higher if we didn't have a special case for AMP stylesheets
-        wastedMs: 1489,
+        wastedMs: 1496,
       },
       {
         totalBytes: 291,
-        url: 'http://localhost:57822/script.js',
-        wastedMs: 311,
+        url: 'http://localhost:50049/script.js',
+        wastedMs: 304,
       },
     ]);
     expect(result.metricSavings).toEqual({FCP: 0, LCP: 0});