Skip to content

Commit

Permalink
Add parcel query option to view de/serialization times & graph size (#9361)

Browse files Browse the repository at this point in the history

* add parcel query option to view serialization times

* flow

* make serialize wrapper

* format name col
  • Loading branch information
AGawrys authored Nov 7, 2023
1 parent a5c60d7 commit 96185d0
Show file tree
Hide file tree
Showing 2 changed files with 99 additions and 16 deletions.
63 changes: 59 additions & 4 deletions packages/dev/query/src/cli.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import repl from 'repl';
import os from 'os';
import nullthrows from 'nullthrows';
import invariant from 'assert';
import {serialize} from 'v8';

// $FlowFixMe
import {table} from 'table';
Expand Down Expand Up @@ -37,9 +38,8 @@ export async function run(input: string[]) {
}

console.log('Loading graphs...');
let {assetGraph, bundleGraph, bundleInfo, requestTracker} = await loadGraphs(
cacheDir,
);
let {assetGraph, bundleGraph, bundleInfo, requestTracker, cacheInfo} =
await loadGraphs(cacheDir);

if (requestTracker == null) {
console.error('Request Graph could not be found');
Expand Down Expand Up @@ -591,7 +591,54 @@ export async function run(input: string[]) {

return entryBundleGroup;
}
// eslint-disable-next-line no-unused-vars
function inspectCache(_) {
// displays sizing of various entries of the cache
let table: Array<Array<string | number>> = [];
table.push([
'Graphs',
'Size (bytes)',
'Deserialize (ms)',
'Serialize (ms)',
]);
let serialized: Map<string, number> = new Map();
serialized.set('RequestGraph', timeSerialize(requestTracker));
serialized.set('bundle_graph_request', timeSerialize(bundleGraph));
serialized.set('asset_graph_request', timeSerialize(assetGraph));
for (let [k, v] of nullthrows(cacheInfo).entries()) {
let s = serialized.get(k);
invariant(s != null);
let name = k.includes('_request') ? k.split('_request')[0] : k;
table.push([name, ...v, s]);
}
function getColumnSum(t: Array<Array<string | number>>, col: number) {
if (t == null) {
return '';
}
const initialValue = 0;
let column = t.map(r => r[col]);
column.shift();
invariant(column != null);
return column.reduce(
(accumulator, currentValue) => accumulator + currentValue,
initialValue,
);
}
table.push([
'Totals',
getColumnSum(table, 1),
getColumnSum(table, 2),
getColumnSum(table, 3),
]);
_printStatsTable('Cache Info', table);
}

// Measures how long v8-serializing `graph` takes, in milliseconds.
// The serialized buffer itself is discarded; only the elapsed time matters.
function timeSerialize(graph) {
  const start = Date.now();
  serialize(graph);
  return Date.now() - start;
}
function _printStatsTable(header, data) {
const config = {
columnDefault: {
Expand Down Expand Up @@ -738,7 +785,8 @@ export async function run(input: string[]) {
server.context.assetGraph = assetGraph;
// $FlowFixMe[prop-missing]
server.context.requestTracker = requestTracker;

// $FlowFixMe[prop-missing]
server.context.cacheInfo = cacheInfo;
for (let [name, cmd] of new Map([
[
'getAsset',
Expand Down Expand Up @@ -887,6 +935,13 @@ export async function run(input: string[]) {
action: findAsset,
},
],
[
'inspectCache',
{
help: 'Cache Information',
action: inspectCache,
},
],
[
'findAssetWithSymbol',
{
Expand Down
52 changes: 40 additions & 12 deletions packages/dev/query/src/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ export async function loadGraphs(cacheDir: string): Promise<{|
bundleGraph: ?BundleGraph,
requestTracker: ?RequestTracker,
bundleInfo: ?Map<ContentKey, PackagedBundleInfo>,
cacheInfo: ?Map<string, Array<string | number>>,
|}> {
function filesBySizeAndModifiedTime() {
let files = fs.readdirSync(cacheDir).map(f => {
Expand All @@ -38,28 +39,40 @@ export async function loadGraphs(cacheDir: string): Promise<{|
return files.map(([f]) => f);
}

let cacheInfo: Map<string, Array<string | number>> = new Map();
let timeToDeserialize = 0;

let requestTracker;
const cache = new LMDBCache(cacheDir);
for (let f of filesBySizeAndModifiedTime()) {
// Empty filename or not the first chunk
if (path.extname(f) !== '' && !f.endsWith('-0')) continue;
try {
let obj = v8.deserialize(
await cache.getLargeBlob(path.basename(f).slice(0, -'-0'.length)),
let file = await cache.getLargeBlob(
path.basename(f).slice(0, -'-0'.length),
);

cacheInfo.set('RequestGraph', [Buffer.byteLength(file)]);

timeToDeserialize = Date.now();
let obj = v8.deserialize(file);
timeToDeserialize = Date.now() - timeToDeserialize;

/* if (obj.assetGraph != null && obj.assetGraph.value.hash != null) {
assetGraph = AssetGraph.deserialize(obj.assetGraph.value);
} else if (obj.bundleGraph != null) {
bundleGraph = BundleGraph.deserialize(obj.bundleGraph.value);
} else */
if (obj['$$type']?.endsWith('RequestGraph')) {
let date = Date.now();
requestTracker = new RequestTracker({
graph: RequestGraph.deserialize(obj.value),
// $FlowFixMe
farm: null,
// $FlowFixMe
options: null,
});
timeToDeserialize += Date.now() - date;
break;
}
} catch (e) {
Expand All @@ -75,7 +88,8 @@ export async function loadGraphs(cacheDir: string): Promise<{|

// Load graphs by finding the main subrequests and loading their results
let assetGraph, bundleGraph, bundleInfo;

cacheInfo.set('bundle_graph_request', []);
cacheInfo.set('asset_graph_request', []);
invariant(requestTracker);
let buildRequestId = requestTracker.graph.getNodeIdByContentKey(
'parcel_build_request',
Expand All @@ -94,21 +108,26 @@ export async function loadGraphs(cacheDir: string): Promise<{|
);
if (bundleGraphRequestNode != null) {
bundleGraph = BundleGraph.deserialize(
(await loadLargeBlobRequestRequest(cache, bundleGraphRequestNode))
.bundleGraph.value,
(
await loadLargeBlobRequestRequest(
cache,
bundleGraphRequestNode,
cacheInfo,
)
).bundleGraph.value,
);

let assetGraphRequest = getSubRequests(
requestTracker.graph.getNodeIdByContentKey(bundleGraphRequestNode.id),
).find(n => n.type === 'request' && n.value.type === 'asset_graph_request');
if (assetGraphRequest != null) {
assetGraph = AssetGraph.deserialize(
(await loadLargeBlobRequestRequest(cache, assetGraphRequest)).assetGraph
.value,
(await loadLargeBlobRequestRequest(cache, assetGraphRequest, cacheInfo))
.assetGraph.value,
);
}
}

cacheInfo.get('RequestGraph')?.push(timeToDeserialize);
let writeBundlesRequest = buildRequestSubRequests.find(
n => n.type === 'request' && n.value.type === 'write_bundles_request',
);
Expand All @@ -121,12 +140,21 @@ export async function loadGraphs(cacheDir: string): Promise<{|
>);
}

return {assetGraph, bundleGraph, requestTracker, bundleInfo};
return {assetGraph, bundleGraph, requestTracker, bundleInfo, cacheInfo};
}

async function loadLargeBlobRequestRequest(cache, node) {
// Loads a request node's cached result blob, recording the blob's size
// (bytes) and its deserialization time (ms) into `cacheInfo` under the
// request's type, then returns the deserialized result.
async function loadLargeBlobRequestRequest(cache, node, cacheInfo) {
  invariant(node.type === 'request');

  const cachedFile = await cache.getLargeBlob(
    nullthrows(node.value.resultCacheKey),
  );
  // Record blob size.
  cacheInfo.get(node.value.type)?.push(cachedFile.byteLength);

  let timeToDeserialize = Date.now();
  const result = v8.deserialize(cachedFile);
  timeToDeserialize = Date.now() - timeToDeserialize;
  // Record time spent deserializing.
  cacheInfo.get(node.value.type)?.push(timeToDeserialize);

  return result;
}

0 comments on commit 96185d0

Please sign in to comment.