allow to compress files in virtual file system (vercel#1115)
erossignon committed Jun 7, 2021
1 parent 2070696 commit 215860b
Showing 29 changed files with 969 additions and 137 deletions.
7 changes: 5 additions & 2 deletions .eslintrc
@@ -1,4 +1,5 @@
{
// note you must disable the base rule as it can report incorrect errors
"extends": ["airbnb-base", "prettier"],
"rules": {
"no-bitwise": "off",
@@ -9,8 +10,10 @@
"consistent-return": "off",
"no-restricted-syntax": "off",
"import/prefer-default-export": "off",
"camelcase": "off"
},
"camelcase": "off",
"no-shadow": "off",
"@typescript-eslint/no-shadow": ["error"]
},
"overrides": [
{
"files": ["*.ts"],
12 changes: 11 additions & 1 deletion README.md
@@ -209,6 +209,16 @@ requirements to compile original Node.js:

See [pkg-fetch](https://github.com/vercel/pkg-fetch) for more info.

### Compression

Pass `--compress Brotli` or `--compress GZip` to `pkg` to further compress the content of the files stored in the executable.

This option can reduce the size of the embedded file system by up to 60%.

The startup time of the application might be reduced slightly.

`-C` can be used as a shortcut for `--compress`.
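
For example, using either the long flag (as in the new `pkg --help` example) or the `-C` shortcut:

    $ pkg --compress GZip index.js
    $ pkg -C Brotli index.js -o app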

### Environment

| Var | Description |
@@ -358,7 +368,7 @@ and check that all the required files for your application are properly
incorporated to the final executable.

$ pkg --debug app.js -o output
$ DEBUG_PKG output
$ DEBUG_PKG=1 output

or

6 changes: 6 additions & 0 deletions lib/compress_type.ts
@@ -0,0 +1,6 @@

export enum CompressType {
None = 0,
GZip = 1,
Brotli = 2
};
7 changes: 7 additions & 0 deletions lib/help.ts
@@ -19,6 +19,7 @@ export default function help() {
--public speed up and disclose the sources of top-level project
--public-packages force specified packages to be considered public
--no-bytecode skip bytecode generation and include source files as plain js
-C, --compress [default=None] compression algorithm = Brotli or GZip
${chalk.dim('Examples:')}
@@ -36,5 +37,11 @@ export default function help() {
${chalk.cyan('$ pkg --public-packages "packageA,packageB" index.js')}
${chalk.gray('–')} Consider all packages to be public
${chalk.cyan('$ pkg --public-packages "*" index.js')}
${chalk.gray('–')} Bakes '--expose-gc' into executable
${chalk.cyan('$ pkg --options expose-gc index.js')}
${chalk.gray(
'–'
)} reduce size of the data packed inside the executable with GZip
${chalk.cyan('$ pkg --compress GZip index.js')}
`);
}
30 changes: 30 additions & 0 deletions lib/index.ts
@@ -26,6 +26,7 @@ import refine from './refiner';
import { shutdown } from './fabricator';
import walk, { Marker, WalkerParams } from './walker';
import { Target, NodeTarget, SymLinks } from './types';
import { CompressType } from './compress_type';
import { patchMachOExecutable } from './mach-o';

const { version } = JSON.parse(
@@ -247,6 +248,8 @@ export async function exec(argv2: string[]) {
't',
'target',
'targets',
'C',
'compress',
],
default: { bytecode: true },
});
@@ -274,6 +277,32 @@

const forceBuild = argv.b || argv.build;

// doCompress
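// Map the --compress / -C value to a CompressType; the switch below accepts the
// algorithm name case-insensitively, including the short forms "gz" and "br".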
const algo = argv.C || argv.compress || 'None';

let doCompress: CompressType = CompressType.None;
switch (algo.toLowerCase()) {
case 'brotli':
case 'br':
doCompress = CompressType.Brotli;
break;
case 'gzip':
case 'gz':
doCompress = CompressType.GZip;
break;
case 'none':
break;
default:
// eslint-disable-next-line no-console
throw wasReported(
`Invalid compression algorithm ${algo} ( should be None, Brotli or Gzip)`
);
}
if (doCompress !== CompressType.None) {
// eslint-disable-next-line no-console
console.log('compression: ', CompressType[doCompress]);
}

// _

if (!argv._.length) {
@@ -631,6 +660,7 @@ export async function exec(argv2: string[]) {
slash: target.platform === 'win' ? '\\' : '/',
target: target as Target,
symLinks,
doCompress,
});

if (target.platform !== 'win' && target.output) {
6 changes: 5 additions & 1 deletion lib/packer.ts
@@ -157,7 +157,7 @@ export default function packer({
}
}
const prelude =
`return (function (REQUIRE_COMMON, VIRTUAL_FILESYSTEM, DEFAULT_ENTRYPOINT, SYMLINKS) {
`return (function (REQUIRE_COMMON, VIRTUAL_FILESYSTEM, DEFAULT_ENTRYPOINT, SYMLINKS, DICT, DOCOMPRESS) {
${bootstrapText}${
log.debugMode ? diagnosticText : ''
}\n})(function (exports) {\n${commonText}\n},\n` +
@@ -166,6 +166,10 @@
`%DEFAULT_ENTRYPOINT%` +
`\n,\n` +
`%SYMLINKS%` +
'\n,\n' +
'%DICT%' +
'\n,\n' +
'%DOCOMPRESS%' +
`\n);`;
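  // %DICT% and %DOCOMPRESS% above are later substituted by producer.ts with the
  // path-component dictionary and the selected CompressType; the prelude passes
  // them into the bootstrap as the DICT and DOCOMPRESS parameters.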

return { prelude, entrypoint, stripes };
84 changes: 62 additions & 22 deletions lib/producer.ts
@@ -1,3 +1,4 @@
import { createBrotliCompress, createGzip } from 'zlib';
import Multistream from 'multistream';
import assert from 'assert';
import { execFileSync } from 'child_process';
@@ -12,6 +13,7 @@ import { log, wasReported } from './log';
import { fabricateTwice } from './fabricator';
import { platform, SymLinks, Target } from './types';
import { Stripe } from './packer';
import { CompressType } from './compress_type';

interface NotFound {
notFound: true;
@@ -248,14 +250,34 @@ interface ProducerOptions {
slash: string;
target: Target;
symLinks: SymLinks;
doCompress: CompressType;
}

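// Shorten virtual-file-system keys: each path component gets a compact base-36 id,
// and snapshot paths are stored as '$'-joined ids (see makeKey below); the component
// dictionary itself is embedded into the prelude via the %DICT% placeholder.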
const fileDictionary: { [key: string]: string } = {};
let counter = 0;
function replace(k: string) {
let existingKey = fileDictionary[k];
if (!existingKey) {
const newkey = counter;
counter += 1;
existingKey = newkey.toString(36);
fileDictionary[k] = existingKey;
}
return existingKey;
}
const separator = '$';

function makeKey(filename: string, slash: string): string {
const a = filename.split(slash).map(replace).join(separator);
return a;
}
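// Example (illustrative paths, starting from an empty dictionary): with slash === '/',
//   makeKey('/snapshot/app/index.js', '/')    -> '0$1$2$3'
//   makeKey('/snapshot/app/lib/util.js', '/') -> '0$1$2$4$5'
// because '', 'snapshot', 'app', 'index.js', 'lib' and 'util.js' are assigned the
// base-36 ids '0'..'5' in the order they are first seen.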
export default function producer({
backpack,
bakes,
slash,
target,
symLinks,
doCompress
}: ProducerOptions) {
return new Promise<void>((resolve, reject) => {
if (!Buffer.alloc) {
@@ -274,18 +296,17 @@ export default function producer({
for (const stripe of stripes) {
let { snap } = stripe;
snap = snapshotify(snap, slash);

if (!vfs[snap]) {
vfs[snap] = {};
}
const vfsKey = makeKey(snap, slash);
if (!vfs[vfsKey]) vfs[vfsKey] = {};
}

const snapshotSymLinks: SymLinks = {};

for (const [key, value] of Object.entries(symLinks)) {
const k = snapshotify(key, slash);
const v = snapshotify(value, slash);
snapshotSymLinks[k] = v;
const vfsKey = makeKey(k, slash);
snapshotSymLinks[vfsKey] = makeKey(v, slash);
}

let meter: streamMeter.StreamMeter;
@@ -295,6 +316,15 @@
meter = streamMeter();
return s.pipe(meter);
}
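// Wrap the source in a gzip/brotli Transform before metering when compression is
// enabled, so the recorded byte counts (and thus the VFS offsets) refer to the
// compressed data actually written into the executable.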
function pipeMayCompressToNewMeter(s: Readable): streamMeter.StreamMeter {
if (doCompress === CompressType.GZip) {
return pipeToNewMeter(s.pipe(createGzip()));
}
if (doCompress === CompressType.Brotli) {
return pipeToNewMeter(s.pipe(createBrotliCompress()));
}
return pipeToNewMeter(s);
}

function next(s: Readable) {
count += 1;
@@ -327,7 +357,8 @@
const { store } = prevStripe;
let { snap } = prevStripe;
snap = snapshotify(snap, slash);
vfs[snap][store] = [track, meter.bytes];
const vfsKey = makeKey(snap, slash);
vfs[vfsKey][store] = [track, meter.bytes];
track += meter.bytes;
}

@@ -353,15 +384,14 @@
return cb(null, intoStream(Buffer.alloc(0)));
}

cb(
null,
pipeToNewMeter(intoStream(buffer || Buffer.from('')))
);
cb(null, pipeMayCompressToNewMeter(intoStream(buffer || Buffer.from(''))));
}
);
}

return cb(null, pipeToNewMeter(intoStream(stripe.buffer)));
return cb(
null,
pipeMayCompressToNewMeter(intoStream(stripe.buffer))
);
}

if (stripe.file) {
@@ -384,15 +414,17 @@
if (fs.existsSync(platformFile)) {
return cb(
null,
pipeToNewMeter(fs.createReadStream(platformFile))
pipeMayCompressToNewMeter(fs.createReadStream(platformFile))
);
}
} catch (err) {
log.debug(`prebuild-install failed[${stripe.file}]:`, err);
}
}

return cb(null, pipeToNewMeter(fs.createReadStream(stripe.file)));
return cb(
null,
pipeMayCompressToNewMeter(fs.createReadStream(stripe.file))
);
}

assert(false, 'producer: bad stripe');
@@ -407,15 +439,23 @@
replaceDollarWise(
replaceDollarWise(
replaceDollarWise(
prelude,
'%VIRTUAL_FILESYSTEM%',
JSON.stringify(vfs)
replaceDollarWise(
replaceDollarWise(
prelude,
'%VIRTUAL_FILESYSTEM%',
JSON.stringify(vfs)
),
'%DEFAULT_ENTRYPOINT%',
JSON.stringify(entrypoint)
),
'%SYMLINKS%',
JSON.stringify(snapshotSymLinks)
),
'%DEFAULT_ENTRYPOINT%',
JSON.stringify(entrypoint)
'%DICT%',
JSON.stringify(fileDictionary)
),
'%SYMLINKS%',
JSON.stringify(snapshotSymLinks)
'%DOCOMPRESS%',
JSON.stringify(doCompress)
)
)
)
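As a side note on the zlib API used by `pipeMayCompressToNewMeter` above, here is a minimal standalone sketch of the same optional-compression pattern (the `maybeCompress` helper and file names are invented for illustration; this is not pkg's code):

```ts
import { createBrotliCompress, createGzip } from 'zlib';
import { createReadStream, createWriteStream } from 'fs';
import type { Readable } from 'stream';

// Optionally wrap a source stream in a gzip or brotli compressor, mirroring the
// way producer.ts treats each stripe of the virtual file system.
function maybeCompress(source: Readable, algo: 'gzip' | 'brotli' | 'none'): Readable {
  if (algo === 'gzip') return source.pipe(createGzip());
  if (algo === 'brotli') return source.pipe(createBrotliCompress());
  return source;
}

// Hypothetical file names, only to make the sketch runnable:
maybeCompress(createReadStream('input.bin'), 'brotli').pipe(createWriteStream('input.bin.br'));
```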
1 change: 1 addition & 0 deletions lib/walker.ts
@@ -497,6 +497,7 @@ class Walker {
assets = expandFiles(assets, base);

for (const asset of assets) {
log.debug(' Adding asset : .... ', asset);
const stat = await fs.stat(asset);

if (stat.isFile()) {
1 change: 1 addition & 0 deletions package.json
@@ -72,6 +72,7 @@
"lint": "eslint lib prelude test",
"lint:fix": "npm run lint -- --fix",
"prepare": "npm run build",
"prepublishOnly": "npm run lint",
"test": "npm run build && node test/test.js node14 no-npm && node test/test.js node12 no-npm && node test/test.js node10 no-npm && node test/test.js host only-npm"
},
"greenkeeper": {