From f29559d79142cbd32bd2556cf6290e920f5f0103 Mon Sep 17 00:00:00 2001 From: Etienne Rossignon Date: Mon, 7 Jun 2021 13:54:26 +0200 Subject: [PATCH] allow to compress files in virtual file system (#1115) --- README.md | 12 +- lib/compress_type.ts | 6 + lib/help.ts | 7 + lib/index.ts | 30 ++ lib/packer.ts | 6 +- lib/producer.ts | 84 +++-- lib/walker.ts | 1 + package.json | 1 + prelude/bootstrap.js | 325 ++++++++++++++---- prelude/diagnostic.js | 90 +++-- test/test-10-pnpm/main.js | 2 + .../test-12-compression-node-opcua/.gitignore | 2 + test/test-12-compression-node-opcua/main.js | 137 ++++++++ .../package.json | 24 ++ .../test-empty.js | 1 + test/test-12-compression-node-opcua/test-x.js | 12 + test/test-12-compression-node-opcua/test-y.js | 6 + .../main.js | 85 +++++ .../myfile.txt | 2 + .../myfile.txt1.gz | Bin 0 -> 75 bytes .../package.json | 12 + .../test.js | 45 +++ .../test_with_new_fs_promises.js | 81 +++++ test/test-12-compression/.gitignore | 1 + test/test-12-compression/main.js | 111 ++++++ test/test-12-compression/test-empty.js | 1 + test/test-12-compression/test-x.js | 13 + test/test-50-mountpoints/main.js | 2 +- 28 files changed, 964 insertions(+), 135 deletions(-) create mode 100644 lib/compress_type.ts create mode 100644 test/test-12-compression-node-opcua/.gitignore create mode 100644 test/test-12-compression-node-opcua/main.js create mode 100644 test/test-12-compression-node-opcua/package.json create mode 100644 test/test-12-compression-node-opcua/test-empty.js create mode 100644 test/test-12-compression-node-opcua/test-x.js create mode 100644 test/test-12-compression-node-opcua/test-y.js create mode 100644 test/test-12-compression-various-file-access/main.js create mode 100644 test/test-12-compression-various-file-access/myfile.txt create mode 100644 test/test-12-compression-various-file-access/myfile.txt1.gz create mode 100644 test/test-12-compression-various-file-access/package.json create mode 100644 test/test-12-compression-various-file-access/test.js create mode 100644 test/test-12-compression-various-file-access/test_with_new_fs_promises.js create mode 100644 test/test-12-compression/.gitignore create mode 100644 test/test-12-compression/main.js create mode 100644 test/test-12-compression/test-empty.js create mode 100644 test/test-12-compression/test-x.js diff --git a/README.md b/README.md index 878855dc8..bea4e2f8f 100644 --- a/README.md +++ b/README.md @@ -209,6 +209,16 @@ requirements to compile original Node.js: See [pkg-fetch](https://github.com/vercel/pkg-fetch) for more info. +### Compression + +Pass `--compress Brotli` or `--compress GZip` to `pkg` to compress further the content of the files store in the exectable. + +This option can reduce the size of the embedded file system by up to 60%. + +The startup time of the application might be reduced slightly. + +`-C` can be used as a shortcut for `--compress `. + ### Environment | Var | Description | @@ -358,7 +368,7 @@ and check that all the required files for your application are properly incorporated to the final executable. 
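
A small, self-contained sketch (not part of this patch) of why the new `--compress GZip` / `--compress Brotli` option can shrink the embedded file system: it compresses a repetitive text buffer with the same built-in `zlib` primitives the packer pipes each file through. The sample data and printed sizes are purely illustrative.

```js
const { gzipSync, brotliCompressSync } = require('zlib');

// Text-heavy content (JS sources, XML assets) compresses very well.
const sample = Buffer.from(
  'module.exports = function add(a, b) { return a + b; };\n'.repeat(1000)
);

console.log('raw   :', sample.length, 'bytes');
console.log('gzip  :', gzipSync(sample).length, 'bytes');
console.log('brotli:', brotliCompressSync(sample).length, 'bytes');
```
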
$ pkg --debug app.js -o output - $ DEBUG_PKG output + $ DEBUG_PKG=1 output or diff --git a/lib/compress_type.ts b/lib/compress_type.ts new file mode 100644 index 000000000..e945c882a --- /dev/null +++ b/lib/compress_type.ts @@ -0,0 +1,6 @@ + +export enum CompressType { + None = 0, + GZip = 1, + Brotli = 2 +}; diff --git a/lib/help.ts b/lib/help.ts index 4835b7fa8..920d41c36 100644 --- a/lib/help.ts +++ b/lib/help.ts @@ -19,6 +19,7 @@ export default function help() { --public speed up and disclose the sources of top-level project --public-packages force specified packages to be considered public --no-bytecode skip bytecode generation and include source files as plain js + -C, --compress [default=None] compression algorithm = Brotli or GZip ${chalk.dim('Examples:')} @@ -36,5 +37,11 @@ export default function help() { ${chalk.cyan('$ pkg --public-packages "packageA,packageB" index.js')} ${chalk.gray('–')} Consider all packages to be public ${chalk.cyan('$ pkg --public-packages "*" index.js')} + ${chalk.gray('–')} Bakes '--expose-gc' into executable + ${chalk.cyan('$ pkg --options expose-gc index.js')} + ${chalk.gray( + '–' + )} reduce size of the data packed inside the executable with GZip + ${chalk.cyan('$ pkg --compress GZip index.js')} `); } diff --git a/lib/index.ts b/lib/index.ts index 4a68002f8..04dfa3723 100644 --- a/lib/index.ts +++ b/lib/index.ts @@ -26,6 +26,7 @@ import refine from './refiner'; import { shutdown } from './fabricator'; import walk, { Marker, WalkerParams } from './walker'; import { Target, NodeTarget, SymLinks } from './types'; +import { CompressType } from './compress_type'; import { patchMachOExecutable } from './mach-o'; const { version } = JSON.parse( @@ -247,6 +248,8 @@ export async function exec(argv2: string[]) { 't', 'target', 'targets', + 'C', + 'compress', ], default: { bytecode: true }, }); @@ -274,6 +277,32 @@ export async function exec(argv2: string[]) { const forceBuild = argv.b || argv.build; + // doCompress + const algo = argv.C || argv.compress || 'None'; + + let doCompress: CompressType = CompressType.None; + switch (algo.toLowerCase()) { + case 'brotli': + case 'br': + doCompress = CompressType.Brotli; + break; + case 'gzip': + case 'gz': + doCompress = CompressType.GZip; + break; + case 'none': + break; + default: + // eslint-disable-next-line no-console + throw wasReported( + `Invalid compression algorithm ${algo} ( should be None, Brotli or Gzip)` + ); + } + if (doCompress !== CompressType.None) { + // eslint-disable-next-line no-console + console.log('compression: ', CompressType[doCompress]); + } + // _ if (!argv._.length) { @@ -631,6 +660,7 @@ export async function exec(argv2: string[]) { slash: target.platform === 'win' ? '\\' : '/', target: target as Target, symLinks, + doCompress, }); if (target.platform !== 'win' && target.output) { diff --git a/lib/packer.ts b/lib/packer.ts index aee46781b..d5eb354e5 100644 --- a/lib/packer.ts +++ b/lib/packer.ts @@ -157,7 +157,7 @@ export default function packer({ } } const prelude = - `return (function (REQUIRE_COMMON, VIRTUAL_FILESYSTEM, DEFAULT_ENTRYPOINT, SYMLINKS) { + `return (function (REQUIRE_COMMON, VIRTUAL_FILESYSTEM, DEFAULT_ENTRYPOINT, SYMLINKS, DICT, DOCOMPRESS) { ${bootstrapText}${ log.debugMode ? 
diagnosticText : '' }\n})(function (exports) {\n${commonText}\n},\n` + @@ -166,6 +166,10 @@ export default function packer({ `%DEFAULT_ENTRYPOINT%` + `\n,\n` + `%SYMLINKS%` + + '\n,\n' + + '%DICT%' + + '\n,\n' + + '%DOCOMPRESS%' + `\n);`; return { prelude, entrypoint, stripes }; diff --git a/lib/producer.ts b/lib/producer.ts index cc525dfc4..16e3edc4b 100644 --- a/lib/producer.ts +++ b/lib/producer.ts @@ -1,3 +1,4 @@ +import { createBrotliCompress, createGzip } from 'zlib'; import Multistream from 'multistream'; import assert from 'assert'; import { execFileSync } from 'child_process'; @@ -12,6 +13,7 @@ import { log, wasReported } from './log'; import { fabricateTwice } from './fabricator'; import { platform, SymLinks, Target } from './types'; import { Stripe } from './packer'; +import { CompressType } from './compress_type'; interface NotFound { notFound: true; @@ -248,14 +250,34 @@ interface ProducerOptions { slash: string; target: Target; symLinks: SymLinks; + doCompress: CompressType; } +const fileDictionary: { [key: string]: string } = {}; +let counter = 0; +function replace(k: string) { + let existingKey = fileDictionary[k]; + if (!existingKey) { + const newkey = counter; + counter += 1; + existingKey = newkey.toString(36); + fileDictionary[k] = existingKey; + } + return existingKey; +} +const separator = '$'; + +function makeKey(filename: string, slash: string): string { + const a = filename.split(slash).map(replace).join(separator); + return a; +} export default function producer({ backpack, bakes, slash, target, symLinks, + doCompress }: ProducerOptions) { return new Promise((resolve, reject) => { if (!Buffer.alloc) { @@ -274,10 +296,8 @@ export default function producer({ for (const stripe of stripes) { let { snap } = stripe; snap = snapshotify(snap, slash); - - if (!vfs[snap]) { - vfs[snap] = {}; - } + const vfsKey = makeKey(snap, slash); + if (!vfs[vfsKey]) vfs[vfsKey] = {}; } const snapshotSymLinks: SymLinks = {}; @@ -285,7 +305,8 @@ export default function producer({ for (const [key, value] of Object.entries(symLinks)) { const k = snapshotify(key, slash); const v = snapshotify(value, slash); - snapshotSymLinks[k] = v; + const vfsKey = makeKey(k, slash); + snapshotSymLinks[vfsKey] = makeKey(v, slash); } let meter: streamMeter.StreamMeter; @@ -295,6 +316,15 @@ export default function producer({ meter = streamMeter(); return s.pipe(meter); } + function pipeMayCompressToNewMeter(s: Readable): streamMeter.StreamMeter { + if (doCompress === CompressType.GZip) { + return pipeToNewMeter(s.pipe(createGzip())); + } + if (doCompress === CompressType.Brotli) { + return pipeToNewMeter(s.pipe(createBrotliCompress())); + } + return pipeToNewMeter(s); + } function next(s: Readable) { count += 1; @@ -327,7 +357,8 @@ export default function producer({ const { store } = prevStripe; let { snap } = prevStripe; snap = snapshotify(snap, slash); - vfs[snap][store] = [track, meter.bytes]; + const vfsKey = makeKey(snap, slash); + vfs[vfsKey][store] = [track, meter.bytes]; track += meter.bytes; } @@ -353,15 +384,14 @@ export default function producer({ return cb(null, intoStream(Buffer.alloc(0))); } - cb( - null, - pipeToNewMeter(intoStream(buffer || Buffer.from(''))) - ); + cb(null, pipeMayCompressToNewMeter(intoStream(buffer || Buffer.from('')))); } ); } - - return cb(null, pipeToNewMeter(intoStream(stripe.buffer))); + return cb( + null, + pipeMayCompressToNewMeter(intoStream(stripe.buffer)) + ); } if (stripe.file) { @@ -384,15 +414,17 @@ export default function producer({ if 
(fs.existsSync(platformFile)) { return cb( null, - pipeToNewMeter(fs.createReadStream(platformFile)) + pipeMayCompressToNewMeter(fs.createReadStream(platformFile)) ); } } catch (err) { log.debug(`prebuild-install failed[${stripe.file}]:`, err); } } - - return cb(null, pipeToNewMeter(fs.createReadStream(stripe.file))); + return cb( + null, + pipeMayCompressToNewMeter(fs.createReadStream(stripe.file)) + ); } assert(false, 'producer: bad stripe'); @@ -407,15 +439,23 @@ export default function producer({ replaceDollarWise( replaceDollarWise( replaceDollarWise( - prelude, - '%VIRTUAL_FILESYSTEM%', - JSON.stringify(vfs) + replaceDollarWise( + replaceDollarWise( + prelude, + '%VIRTUAL_FILESYSTEM%', + JSON.stringify(vfs) + ), + '%DEFAULT_ENTRYPOINT%', + JSON.stringify(entrypoint) + ), + '%SYMLINKS%', + JSON.stringify(snapshotSymLinks) ), - '%DEFAULT_ENTRYPOINT%', - JSON.stringify(entrypoint) + '%DICT%', + JSON.stringify(fileDictionary) ), - '%SYMLINKS%', - JSON.stringify(snapshotSymLinks) + '%DOCOMPRESS%', + JSON.stringify(doCompress) ) ) ) diff --git a/lib/walker.ts b/lib/walker.ts index 0d3fad9a1..bf78b9683 100644 --- a/lib/walker.ts +++ b/lib/walker.ts @@ -497,6 +497,7 @@ class Walker { assets = expandFiles(assets, base); for (const asset of assets) { + log.debug(' Adding asset : .... ', asset); const stat = await fs.stat(asset); if (stat.isFile()) { diff --git a/package.json b/package.json index b068d7bf8..1045ababc 100644 --- a/package.json +++ b/package.json @@ -72,6 +72,7 @@ "lint": "eslint lib prelude test", "lint:fix": "npm run lint -- --fix", "prepare": "npm run build", + "prepublishOnly": "npm run lint", "test": "npm run build && node test/test.js node14 no-npm && node test/test.js node12 no-npm && node test/test.js node10 no-npm && node test/test.js host only-npm" }, "greenkeeper": { diff --git a/prelude/bootstrap.js b/prelude/bootstrap.js index b919c706d..7ea64eb1d 100644 --- a/prelude/bootstrap.js +++ b/prelude/bootstrap.js @@ -10,11 +10,12 @@ /* global REQUIRE_COMMON */ /* global VIRTUAL_FILESYSTEM */ /* global DEFAULT_ENTRYPOINT */ +/* global DICT */ +/* global DOCOMPRESS */ /* global SYMLINKS */ 'use strict'; -const assert = require('assert'); const childProcess = require('child_process'); const { createHash } = require('crypto'); const fs = require('fs'); @@ -24,6 +25,16 @@ const path = require('path'); const { promisify, _extend } = require('util'); const { Script } = require('vm'); const { tmpdir } = require('os'); +const util = require('util'); + +const { + brotliDecompress, + brotliDecompressSync, + createBrotliDecompress, + createGunzip, + gunzip, + gunzipSync, +} = require('zlib'); const common = {}; REQUIRE_COMMON(common); @@ -111,6 +122,7 @@ function insideMountpoint(f) { return exterior + file.slice(left.length - 1); }) .filter((result) => result); + if (found.length >= 2) throw new Error('UNEXPECTED-00'); if (found.length === 0) return null; return found[0]; @@ -175,30 +187,51 @@ console.log(translateNth(["", "r+"], 0, "d:\\snapshot\\countly\\plugins-ext")); console.log(translateNth(["", "rw"], 0, "d:\\snapshot\\countly\\plugins-ext\\")); console.log(translateNth(["", "a+"], 0, "d:\\snapshot\\countly\\plugins-ext\\1234")); */ +const separator = '$'; +function replace(k) { + return DICT[k]; +} +function makeKey(filename, slash) { + const a = filename.split(slash).map(replace).join(separator); + return a || filename; +} +const dictRev = {}; +Object.entries(DICT).forEach(([k, v]) => { + dictRev[v] = k; +}); + +function toOriginal(fShort) { + return fShort + .split('$') + 
.map((x) => dictRev[x]) + .join(path.sep); +} -const win32 = process.platform === 'win32'; -const slash = win32 ? '\\' : '/'; -function realpathFromSnapshot(path_) { - let path_normal = path.normalize(path_); - let count = 0; +const symlinksEntries = Object.entries(SYMLINKS); +function normalizePathAndFollowLink(f) { + f = normalizePath(f); + f = makeKey(f, path.sep); let needToSubstitute = true; while (needToSubstitute) { needToSubstitute = false; - for (const [k, v] of Object.entries(SYMLINKS)) { - if (path_normal.startsWith(k + slash) || path_normal === k) { - path_normal = path_normal.replace(k, v); + for (const [k, v] of symlinksEntries) { + if (f.startsWith(`${k}${separator}`) || f === k) { + f = f.replace(k, v); needToSubstitute = true; break; } } - count += 1; } - assert(count === 1 || count === 2); - return path_normal; + return f; +} +function realpathFromSnapshot(path_) { + const realPath = toOriginal(normalizePathAndFollowLink(path_)); + return realPath; } -function normalizePathAndFollowLink(path_) { - return realpathFromSnapshot(normalizePath(path_)); +function findVirtualFileSystemEntry(path_) { + const fShort = normalizePathAndFollowLink(path_); + return VIRTUAL_FILESYSTEM[fShort]; } // ///////////////////////////////////////////////////////////////// @@ -236,11 +269,10 @@ function projectToFilesystem(f) { function projectToNearby(f) { return path.join(xpdn, path.basename(f)); } - function findNativeAddonSyncFreeFromRequire(path_) { if (!insideSnapshot(path_)) throw new Error(`UNEXPECTED-10 ${path_}`); if (path_.slice(-5) !== '.node') return null; // leveldown.node.js - // check mearby first to prevent .node tampering + // check nearby first to prevent .node tampering const projector = projectToNearby(path_); if (fs.existsSync(projector)) return projector; const projectors = projectToFilesystem(path_); @@ -382,17 +414,39 @@ function payloadCopyManySync(source, target, targetStart, sourceStart) { } } +const GZIP = 1; +const BROTLI = 2; function payloadFile(pointer, cb) { const target = Buffer.alloc(pointer[1]); payloadCopyMany(pointer, target, 0, 0, (error) => { if (error) return cb(error); - cb(null, target); + if (DOCOMPRESS === GZIP) { + gunzip(target, (error2, target2) => { + if (error2) return cb(error2); + cb(null, target2); + }); + } else if (DOCOMPRESS === BROTLI) { + brotliDecompress(target, (error2, target2) => { + if (error2) return cb(error2); + cb(null, target2); + }); + } else { + return cb(null, target); + } }); } function payloadFileSync(pointer) { const target = Buffer.alloc(pointer[1]); payloadCopyManySync(pointer, target, 0, 0); + if (DOCOMPRESS === GZIP) { + const target1 = gunzipSync(target); + return target1; + } + if (DOCOMPRESS === BROTLI) { + const target1 = brotliDecompressSync(target); + return target1; + } return target; } @@ -455,6 +509,7 @@ function payloadFileSync(pointer) { access: fs.access, mkdirSync: fs.mkdirSync, mkdir: fs.mkdir, + createReadStream: fs.createReadStream, }; ancestor.realpathSync.native = fs.realpathSync; @@ -513,12 +568,57 @@ function payloadFileSync(pointer) { // open ////////////////////////////////////////////////////////// // /////////////////////////////////////////////////////////////// + function removeTemporaryFolderAndContent(folder) { + if (!folder) return; + if (NODE_VERSION_MAJOR <= 14) { + if (NODE_VERSION_MAJOR <= 10) { + // folder must be empty + for (const f of fs.readdirSync(folder)) { + fs.unlinkSync(path.join(folder, f)); + } + fs.rmdirSync(folder); + } else { + fs.rmdirSync(folder, { recursive: true }); 
+ } + } else { + fs.rmSync(folder, { recursive: true }); + } + } + const temporaryFiles = {}; + const os = require('os'); + let tmpFolder = ''; + process.on('beforeExit', () => { + removeTemporaryFolderAndContent(tmpFolder); + }); + function deflateSync(snapshotFilename) { + if (!tmpFolder) tmpFolder = fs.mkdtempSync(path.join(os.tmpdir(), 'pkg-')); + const content = fs.readFileSync(snapshotFilename, { encoding: 'binary' }); + // content is already unzipped ! + const hash = createHash('sha256').update(content).digest('hex'); + const fName = path.join(tmpFolder, hash); + fs.writeFileSync(fName, content); + return fName; + } + function uncompressExternally(snapshotFilename) { + let t = temporaryFiles[snapshotFilename]; + if (!t) { + const tmpFile = deflateSync(snapshotFilename); + t = { tmpFile }; + temporaryFiles[snapshotFilename] = t; + } + return t.tmpFile; + } + function uncompressExternallyAndOpen(snapshotFilename) { + const externalFile = uncompressExternally(snapshotFilename); + const fd = fs.openSync(externalFile, 'r'); + return fd; + } + function openFromSnapshot(path_, cb) { const cb2 = cb || rethrow; - const path_vfs = normalizePathAndFollowLink(path_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; - if (!entity) return cb2(error_ENOENT('File or directory', path_vfs)); - const dock = { path: path_vfs, entity, position: 0 }; + const entity = findVirtualFileSystemEntry(path_); + if (!entity) return cb2(error_ENOENT('File or directory', path_)); + const dock = { path: path_, entity, position: 0 }; const nullDevice = windows ? '\\\\.\\NUL' : '/dev/null'; if (cb) { ancestor.open.call(fs, nullDevice, 'r', (error, fd) => { @@ -533,6 +633,30 @@ function payloadFileSync(pointer) { } } + let bypassCompressCheckWhenCallbyCreateReadStream = false; + + fs.createReadStream = function createReadStream(path_) { + if (!insideSnapshot(path_)) { + return ancestor.createReadStream.apply(fs, arguments); + } + if (insideMountpoint(path_)) { + return ancestor.createReadStream.apply( + fs, + translateNth(arguments, 0, path_) + ); + } + bypassCompressCheckWhenCallbyCreateReadStream = true; + const stream = ancestor.createReadStream.apply(fs, arguments); + bypassCompressCheckWhenCallbyCreateReadStream = false; + + if (DOCOMPRESS === GZIP) { + return stream.pipe(createGunzip()); + } + if (DOCOMPRESS === BROTLI) { + return stream.pipe(createBrotliDecompress()); + } + return stream; + }; fs.openSync = function openSync(path_) { if (!insideSnapshot(path_)) { return ancestor.openSync.apply(fs, arguments); @@ -540,7 +664,9 @@ function payloadFileSync(pointer) { if (insideMountpoint(path_)) { return ancestor.openSync.apply(fs, translateNth(arguments, 0, path_)); } - + if (DOCOMPRESS && !bypassCompressCheckWhenCallbyCreateReadStream) { + return uncompressExternallyAndOpen(path_); + } return openFromSnapshot(path_); }; @@ -551,8 +677,11 @@ function payloadFileSync(pointer) { if (insideMountpoint(path_)) { return ancestor.open.apply(fs, translateNth(arguments, 0, path_)); } - const callback = dezalgo(maybeCallback(arguments)); + if (DOCOMPRESS && !bypassCompressCheckWhenCallbyCreateReadStream) { + const fd = uncompressExternallyAndOpen(path_); + return callback(null, fd); + } openFromSnapshot(path_, callback); }; @@ -755,20 +884,19 @@ function payloadFileSync(pointer) { function readFileFromSnapshot(path_, cb) { const cb2 = cb || rethrow; - const path_vfs = normalizePath(path_); - - const entity = VIRTUAL_FILESYSTEM[path_vfs]; - if (!entity) return cb2(error_ENOENT('File', path_vfs)); + const entity = 
findVirtualFileSystemEntry(path_); + if (!entity) return cb2(error_ENOENT('File', path_)); const entityLinks = entity[STORE_LINKS]; - if (entityLinks) return cb2(error_EISDIR(path_vfs)); + if (entityLinks) return cb2(error_EISDIR(path_)); const entityContent = entity[STORE_CONTENT]; if (entityContent) return readFileFromSnapshotSub(entityContent, cb); const entityBlob = entity[STORE_BLOB]; - if (entityBlob) return cb2(null, Buffer.from('source-code-not-available')); - + if (entityBlob) { + return cb2(null, Buffer.from('source-code-not-available')); + } // why return empty buffer? // otherwise this error will arise: // Error: UNEXPECTED-20 @@ -883,7 +1011,8 @@ function payloadFileSync(pointer) { function getFileTypes(path_, entries) { return entries.map((entry) => { - const entity = VIRTUAL_FILESYSTEM[path.join(path_, entry)]; + const ff = path.join(path_, entry); + const entity = findVirtualFileSystemEntry(ff); if (entity[STORE_BLOB] || entity[STORE_CONTENT]) return new Dirent(entry, 1); if (entity[STORE_LINKS]) return new Dirent(entry, 2); @@ -921,28 +1050,26 @@ function payloadFileSync(pointer) { const cb2 = cb || rethrow; if (isRoot) return readdirRoot(path_, cb); - const path_vfs = normalizePathAndFollowLink(path_); + const entity = findVirtualFileSystemEntry(path_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; if (!entity) { - return cb2(error_ENOENT('Directory', path_vfs)); + return cb2(error_ENOENT('Directory', path_)); } const entityBlob = entity[STORE_BLOB]; if (entityBlob) { - return cb2(error_ENOTDIR(path_vfs)); + return cb2(error_ENOTDIR(path_)); } const entityContent = entity[STORE_CONTENT]; if (entityContent) { - return cb2(error_ENOTDIR(path_vfs)); + return cb2(error_ENOTDIR(path_)); } const entityLinks = entity[STORE_LINKS]; if (entityLinks) { - return readdirFromSnapshotSub(entityLinks, path_vfs, cb); + return readdirFromSnapshotSub(entityLinks, path_, cb); } - return cb2(new Error('UNEXPECTED-25')); } @@ -1001,9 +1128,11 @@ function payloadFileSync(pointer) { } if (insideMountpoint(path_)) { // app should not know real file name + return path_; } - return realpathFromSnapshot(path_); + const realPath = realpathFromSnapshot(path_); + return realPath; }; fs.realpath = function realpath(path_) { @@ -1012,6 +1141,7 @@ function payloadFileSync(pointer) { } if (insideMountpoint(path_)) { // app should not know real file name + return path_; } const callback = dezalgo(maybeCallback(arguments)); @@ -1099,9 +1229,8 @@ function payloadFileSync(pointer) { function statFromSnapshot(path_, cb) { const cb2 = cb || rethrow; - const path_vfs = normalizePathAndFollowLink(path_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; - if (!entity) return findNativeAddonForStat(path_vfs, cb); + const entity = findVirtualFileSystemEntry(path_); + if (!entity) return findNativeAddonForStat(path_, cb); const entityStat = entity[STORE_STAT]; if (entityStat) return statFromSnapshotSub(entityStat, cb); return cb2(new Error('UNEXPECTED-35')); @@ -1197,9 +1326,9 @@ function payloadFileSync(pointer) { } function existsFromSnapshot(path_) { - const path_vfs = normalizePathAndFollowLink(path_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; - if (!entity) return findNativeAddonForExists(path_vfs); + const fShort = normalizePathAndFollowLink(path_); + const entity = VIRTUAL_FILESYSTEM[fShort]; + if (!entity) return findNativeAddonForExists(path_); return true; } @@ -1232,9 +1361,8 @@ function payloadFileSync(pointer) { function accessFromSnapshot(path_, cb) { const cb2 = cb || rethrow; - const path_vfs = 
normalizePathAndFollowLink(path_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; - if (!entity) return cb2(error_ENOENT('File or directory', path_vfs)); + const entity = findVirtualFileSystemEntry(path_); + if (!entity) return cb2(error_ENOENT('File or directory', path_)); return cb2(null, undefined); } @@ -1299,16 +1427,72 @@ function payloadFileSync(pointer) { // /////////////////////////////////////////////////////////////// if (fs.promises !== undefined) { - fs.promises.open = promisify(fs.open); - fs.promises.read = promisify(fs.read); - fs.promises.write = promisify(fs.write); - fs.promises.readFile = promisify(fs.readFile); - fs.promises.readdir = promisify(fs.readdir); - fs.promises.realpath = promisify(fs.realpath); - fs.promises.stat = promisify(fs.stat); - fs.promises.lstat = promisify(fs.lstat); - fs.promises.fstat = promisify(fs.fstat); - fs.promises.access = promisify(fs.access); + const ancestor_promises = { + open: fs.promises.open, + read: fs.promises.read, + write: fs.promises.write, + readFile: fs.promises.readFile, + readdir: fs.promises.readdir, + realpath: fs.promises.realpath, + stat: fs.promises.stat, + lstat: fs.promises.lstat, + fstat: fs.promises.fstat, + access: fs.promises.access, + }; + + fs.promises.open = async function open(path_) { + if (!insideSnapshot(path_)) { + return ancestor_promises.open.apply(this, arguments); + } + if (insideMountpoint(path_)) { + return ancestor_promises.open.apply( + this, + translateNth(arguments, 0, path_) + ); + } + const externalFile = uncompressExternally(path_); + arguments[0] = externalFile; + const fd = await ancestor_promises.open.apply(this, arguments); + if (typeof fd === 'object') { + fd._pkg = { externalFile, file: path_ }; + } + return fd; + }; + fs.promises.readFile = async function readFile(path_) { + if (!insideSnapshot(path_)) { + return ancestor_promises.readFile.apply(this, arguments); + } + if (insideMountpoint(path_)) { + return ancestor_promises.readFile.apply( + this, + translateNth(arguments, 0, path_) + ); + } + const externalFile = uncompressExternally(path_); + arguments[0] = externalFile; + return ancestor_promises.readFile.apply(this, arguments); + }; + + fs.promises.write = async function write(fd) { + if (fd._pkg) { + throw new Error( + `[PKG] Cannot write into Snapshot file : ${fd._pkg.file}` + ); + } + return ancestor_promises.write.apply(this, arguments); + }; + + // this one use promisify on purpose + fs.promises.readdir = util.promisify(fs.readdir); + + /* + fs.promises.read = util.promisify(fs.read); + fs.promises.realpath = util.promisify(fs.realpath); + fs.promises.stat = util.promisify(fs.stat); + fs.promises.lstat = util.promisify(fs.lstat); + fs.promises.fstat = util.promisify(fs.fstat); + fs.promises.access = util.promisify(fs.access); + */ } // /////////////////////////////////////////////////////////////// @@ -1341,18 +1525,16 @@ function payloadFileSync(pointer) { if (!insideSnapshot(path_)) { return process.binding('fs').internalModuleStat(long); } - if (insideMountpoint(path_)) { return process .binding('fs') .internalModuleStat(makeLong(translate(path_))); } - const path_vfs = normalizePathAndFollowLink(path_); + const entity = findVirtualFileSystemEntry(path_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; if (!entity) { - return findNativeAddonForInternalModuleStat(path_vfs); + return findNativeAddonForInternalModuleStat(path_); } const entityBlob = entity[STORE_BLOB]; @@ -1398,7 +1580,8 @@ function payloadFileSync(pointer) { return readFile(makeLong(translate(path_))); } - 
const entity = VIRTUAL_FILESYSTEM[normalizePathAndFollowLink(path_)]; + const entity = findVirtualFileSystemEntry(path_); + if (!entity) { return returnArray ? [undefined, false] : undefined; } @@ -1407,7 +1590,6 @@ function payloadFileSync(pointer) { if (!entityContent) { return returnArray ? [undefined, false] : undefined; } - return returnArray ? [payloadFileSync(entityContent).toString(), true] : payloadFileSync(entityContent).toString(); @@ -1472,12 +1654,11 @@ function payloadFileSync(pointer) { return ancestor._compile.apply(this, arguments); } if (insideMountpoint(filename_)) { - // DONT TRANSLATE! otherwise __dirname gets real name + // DON'T TRANSLATE! otherwise __dirname gets real name return ancestor._compile.apply(this, arguments); } - const path_vfs = normalizePathAndFollowLink(filename_); - const entity = VIRTUAL_FILESYSTEM[path_vfs]; + const entity = findVirtualFileSystemEntry(filename_); if (!entity) { // let user try to "_compile" a packaged file @@ -1489,7 +1670,7 @@ function payloadFileSync(pointer) { if (entityBlob) { const options = { - filename: path_vfs, + filename: filename_, lineOffset: 0, displayErrors: true, cachedData: payloadFileSync(entityBlob), @@ -1503,9 +1684,9 @@ function payloadFileSync(pointer) { const script = new Script(code, options); const wrapper = script.runInThisContext(options); if (!wrapper) process.exit(4); // for example VERSION_MISMATCH - const dirname = path.dirname(path_vfs); + const dirname = path.dirname(filename_); const rqfn = makeRequireFunction(this); - const args = [this.exports, rqfn, this, path_vfs, dirname]; + const args = [this.exports, rqfn, this, filename_, dirname]; return wrapper.apply(this.exports, args); } @@ -1522,7 +1703,6 @@ function payloadFileSync(pointer) { Module._resolveFilename = function _resolveFilename() { let filename; let flagWasOn = false; - try { filename = ancestor._resolveFilename.apply(this, arguments); } catch (error) { @@ -1539,7 +1719,6 @@ function payloadFileSync(pointer) { FLAG_ENABLE_PROJECT = false; } } - if (!insideSnapshot(filename)) { return filename; } @@ -1820,7 +1999,7 @@ function payloadFileSync(pointer) { // this case triggers on windows mainly. // we copy all stuff that exists in the folder of the .node module - // into the tempory folders... + // into the temporary folders... const files = fs.readdirSync(moduleFolder); for (const file of files) { if (file === moduleBaseName) { diff --git a/prelude/diagnostic.js b/prelude/diagnostic.js index c66d0a453..343048c6d 100644 --- a/prelude/diagnostic.js +++ b/prelude/diagnostic.js @@ -1,34 +1,43 @@ /* eslint-disable global-require */ /* eslint-disable no-console */ +/* global DICT */ 'use strict'; (function installDiagnostic() { const fs = require('fs'); const path = require('path'); - const win32 = process.platform === 'win32'; - function dumpLevel(folderPath, level) { + if (process.env.DEBUG_PKG === '2') { + console.log(Object.entries(DICT)); + } + function dumpLevel(filename, level, tree) { let totalSize = 0; - const d = fs.readdirSync(folderPath); + const d = fs.readdirSync(filename); for (let j = 0; j < d.length; j += 1) { - const f = path.join(folderPath, d[j]); - // const isSymbolicLink = fs.statSync(f).isSymbolicLink(); + const f = path.join(filename, d[j]); const realPath = fs.realpathSync(f); const isSymbolicLink2 = f !== realPath; const s = fs.statSync(f); totalSize += s.size; - console.log( - ' '.padStart(level * 2, ' '), - d[j], - s.size, - isSymbolicLink2 ? 
`=> ${realPath}` : ' ' - ); if (s.isDirectory() && !isSymbolicLink2) { - totalSize += dumpLevel(f, level + 1); + const tree1 = []; + totalSize += dumpLevel(f, level + 1, tree1); + const str = + (' '.padStart(level * 2, ' ') + d[j]).padEnd(40, ' ') + + (totalSize.toString().padStart(10, ' ') + + (isSymbolicLink2 ? `=> ${realPath}` : ' ')); + tree.push(str); + tree1.forEach((x) => tree.push(x)); + } else { + const str = + (' '.padStart(level * 2, ' ') + d[j]).padEnd(40, ' ') + + (s.size.toString().padStart(10, ' ') + + (isSymbolicLink2 ? `=> ${realPath}` : ' ')); + tree.push(str); } } return totalSize; @@ -47,31 +56,38 @@ if (process.env.DEBUG_PKG) { console.log('------------------------------- virtual file system'); const startFolder = win32 ? 'C:\\snapshot' : '/snapshot'; - const totalSize = dumpLevel(startFolder, 2); + console.log(startFolder); + + const tree = []; + const totalSize = dumpLevel(startFolder, 1, tree); + console.log(tree.join('\n')); + console.log('Total size = ', totalSize); - wrap(fs, 'openSync'); - wrap(fs, 'open'); - wrap(fs, 'readSync'); - wrap(fs, 'read'); - wrap(fs, 'writeSync'); - wrap(fs, 'write'); - wrap(fs, 'closeSync'); - wrap(fs, 'readFileSync'); - wrap(fs, 'close'); - wrap(fs, 'readFile'); - wrap(fs, 'readdirSync'); - wrap(fs, 'readdir'); - wrap(fs, 'realpathSync'); - wrap(fs, 'realpath'); - wrap(fs, 'statSync'); - wrap(fs, 'stat'); - wrap(fs, 'lstatSync'); - wrap(fs, 'lstat'); - wrap(fs, 'fstatSync'); - wrap(fs, 'fstat'); - wrap(fs, 'existsSync'); - wrap(fs, 'exists'); - wrap(fs, 'accessSync'); - wrap(fs, 'access'); + if (process.env.DEBUG_PKG === '2') { + wrap(fs, 'openSync'); + wrap(fs, 'open'); + wrap(fs, 'readSync'); + wrap(fs, 'read'); + wrap(fs, 'writeSync'); + wrap(fs, 'write'); + wrap(fs, 'closeSync'); + wrap(fs, 'readFileSync'); + wrap(fs, 'close'); + wrap(fs, 'readFile'); + wrap(fs, 'readdirSync'); + wrap(fs, 'readdir'); + wrap(fs, 'realpathSync'); + wrap(fs, 'realpath'); + wrap(fs, 'statSync'); + wrap(fs, 'stat'); + wrap(fs, 'lstatSync'); + wrap(fs, 'lstat'); + wrap(fs, 'fstatSync'); + wrap(fs, 'fstat'); + wrap(fs, 'existsSync'); + wrap(fs, 'exists'); + wrap(fs, 'accessSync'); + wrap(fs, 'access'); + } } })(); diff --git a/test/test-10-pnpm/main.js b/test/test-10-pnpm/main.js index 07c24b816..248a32008 100644 --- a/test/test-10-pnpm/main.js +++ b/test/test-10-pnpm/main.js @@ -48,6 +48,8 @@ const logPkg = utils.pkg.sync([ '--target', target, '--debug', + '--compress', + 'Brotli', '--output', output, input, diff --git a/test/test-12-compression-node-opcua/.gitignore b/test/test-12-compression-node-opcua/.gitignore new file mode 100644 index 000000000..a56a7ef43 --- /dev/null +++ b/test/test-12-compression-node-opcua/.gitignore @@ -0,0 +1,2 @@ +node_modules + diff --git a/test/test-12-compression-node-opcua/main.js b/test/test-12-compression-node-opcua/main.js new file mode 100644 index 000000000..7e924f986 --- /dev/null +++ b/test/test-12-compression-node-opcua/main.js @@ -0,0 +1,137 @@ +#!/usr/bin/env node + +'use strict'; + +/* + * A test with a large number of modules with symlinks + * (installed with npm) and compress + * + */ + +const fs = require('fs'); +const path = require('path'); +const assert = require('assert'); +const utils = require('../utils.js'); + +assert(!module.parent); +assert(__dirname === process.cwd()); + +// ignore this test if nodejs <= 10 , as recent version of PNPM do not support nodejs=10 +const MAJOR_VERSION = parseInt(process.version.match(/v([0-9]+)/)[1], 10); +if (MAJOR_VERSION < 12) { + return; +} + +// remove any 
possible left-over +utils.vacuum.sync('./node_modules'); +utils.vacuum.sync('./pnpm-lock.yaml'); + +// launch `pnpm install` +const pnpmlog = utils.spawn.sync( + path.join( + path.dirname(process.argv[0]), + 'npx' + (process.platform === 'win32' ? '.cmd' : '') + ), + ['pnpm', 'install'], + { cwd: path.dirname(__filename), expect: 0 } +); +console.log('pnpm log :', pnpmlog); + +// verify that we have the .pnpm folder and a symlinks module in node_modules +assert(fs.lstatSync(path.join(__dirname, 'node_modules/.pnpm')).isDirectory()); +assert( + fs + .lstatSync(path.join(__dirname, 'node_modules/node-opcua-address-space')) + .isSymbolicLink() +); + +/* eslint-disable no-unused-vars */ +const input = 'package.json'; +const target = process.argv[2] || 'host'; +const ext = process.platform === 'win32' ? '.exe' : ''; +const outputRef = 'test-output-empty' + ext; +const outputNone = 'test-output-None' + ext; +const outputGZip = 'test-output-GZip' + ext; +const outputBrotli = 'test-output-Brotli' + ext; +const outputBrotliDebug = 'test-output-Brotli-debug' + ext; + +const inspect = ['ignore', 'ignore', 'pipe']; + +console.log(' compiling empty '); +const logPkg0 = utils.pkg.sync( + [ + '--target', + target, + '--compress', + 'None', + '--output', + outputRef, + './test-empty.js', + ], + { stdio: inspect, expect: 0 } +); +const sizeReference = fs.statSync(outputRef).size; + +function pkgCompress(compressMode, output) { + console.log(` compiling compression ${compressMode} `); + const logPkg1 = utils.pkg.sync( + ['--target', target, '--compress', compressMode, '--output', output, input], + { stdio: inspect, expect: 0 } + ); + // check that produced executable is running and produce the expected output. + const log = utils.spawn.sync(path.join(__dirname, output), [], { + cwd: __dirname, + expect: 0, + }); + assert(log === '42\n'); + return fs.statSync(output).size; +} + +const sizeNoneFull = pkgCompress('None', outputNone); +const sizeGZipFull = pkgCompress('GZip', outputGZip); +const sizeBrotliFull = pkgCompress('Brotli', outputBrotli); + +const sizeNone = sizeNoneFull - sizeReference; +const sizeBrotli = sizeBrotliFull - sizeReference; +const sizeGZip = sizeGZipFull - sizeReference; + +console.log('empty = ', sizeReference); +console.log('no compression = ', sizeNoneFull, sizeNone); +console.log('Brotli = ', sizeBrotliFull, sizeBrotli); +console.log('GZip = ', sizeGZipFull, sizeGZip); + +console.log( + ' Δ GZip = ', + sizeGZip - sizeNone, + '(', + (((sizeGZip - sizeNone) / sizeNone) * 100).toFixed(0), + '%)' +); +console.log( + ' Δ Brotli = ', + sizeBrotli - sizeNone, + '(', + (((sizeBrotli - sizeNone) / sizeNone) * 100).toFixed(0), + '%)' +); + +assert(sizeNone > sizeGZip); +assert(sizeGZip > sizeBrotli); + +const logPkg5 = utils.pkg.sync( + ['--target', target, '--compress', 'Crap', '--output', outputBrotli, input], + { expect: 2 } +); + +// xx console.log(logPkg4); +assert(logPkg5.match(/Invalid compression algorithm/g)); + +utils.vacuum.sync(outputRef); +utils.vacuum.sync(outputNone); +utils.vacuum.sync(outputBrotli); +utils.vacuum.sync(outputGZip); +utils.vacuum.sync(outputBrotliDebug); +utils.vacuum.sync('node_modules'); +utils.vacuum.sync('./pnpm-lock.yaml'); + +console.log('OK'); diff --git a/test/test-12-compression-node-opcua/package.json b/test/test-12-compression-node-opcua/package.json new file mode 100644 index 000000000..fa88fb8f1 --- /dev/null +++ b/test/test-12-compression-node-opcua/package.json @@ -0,0 +1,24 @@ +{ + "name": "test-12-compression", + "version": "1.0.0", + 
"description": "", + "main": "test-x.js", + "scripts": { + "preinstall": "npx only-allow pnpm", + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC", + "dependencies": { + "node-opcua-address-space": "^2.36.0", + "node-opcua-crypto": "^1.7.1", + "node-opcua-nodesets": "^2.36.0" + }, + "bin": "test-x.js", + "pkg": { + "assets": [ + "./node_modules/node-opcua-nodesets/nodesets/*.xml" + ] + } +} diff --git a/test/test-12-compression-node-opcua/test-empty.js b/test/test-12-compression-node-opcua/test-empty.js new file mode 100644 index 000000000..e56b212ef --- /dev/null +++ b/test/test-12-compression-node-opcua/test-empty.js @@ -0,0 +1 @@ +/***/ diff --git a/test/test-12-compression-node-opcua/test-x.js b/test/test-12-compression-node-opcua/test-x.js new file mode 100644 index 000000000..cbcb95309 --- /dev/null +++ b/test/test-12-compression-node-opcua/test-x.js @@ -0,0 +1,12 @@ +'use strict'; + +const { AddressSpace } = require('node-opcua-address-space'); + +const { generateAddressSpace } = require('node-opcua-address-space/nodeJS'); +const { nodesets } = require('node-opcua-nodesets'); + +(async () => { + const addressSpace = AddressSpace.create({}); + await generateAddressSpace(addressSpace, [nodesets.standard]); + console.log('42'); +})(); diff --git a/test/test-12-compression-node-opcua/test-y.js b/test/test-12-compression-node-opcua/test-y.js new file mode 100644 index 000000000..a55e4000e --- /dev/null +++ b/test/test-12-compression-node-opcua/test-y.js @@ -0,0 +1,6 @@ +'use strict'; + +const fs = require('fs'); +const { nodesets } = require('node-opcua-nodesets'); +const a = fs.readFileSync(nodesets.adi); +console.log(a.length); diff --git a/test/test-12-compression-various-file-access/main.js b/test/test-12-compression-various-file-access/main.js new file mode 100644 index 000000000..6e6b45fc6 --- /dev/null +++ b/test/test-12-compression-various-file-access/main.js @@ -0,0 +1,85 @@ +#!/usr/bin/env node + +'use strict'; + +const path = require('path'); +const assert = require('assert'); +const utils = require('../utils.js'); + +assert(!module.parent); +assert(__dirname === process.cwd()); + +/* eslint-disable no-unused-vars */ +const target = process.argv[2] || 'host'; +const ext = process.platform === 'win32' ? 
'.exe' : ''; +const output = 'output' + ext; + +async function runTest(input) { + const logPkgNone = utils.pkg.sync( + ['--target', target, '--compress', 'None', '--output', output, input], + { expect: 0 } + ); + const logPkgGZip = utils.pkg.sync( + [ + '--target', + target, + '--compress', + 'GZIP', + '--output', + 'gzip_' + output, + input, + ], + { expect: 0 } + ); + + // ----------------------------------------------------------------------- + // Execute programm outside pjg + const logRef = utils.spawn.sync('node', [path.join(__dirname, input)], { + cwd: __dirname, + expect: 0, + }); + + const logNone = utils.spawn.sync(path.join(__dirname, output), [], { + cwd: __dirname, + expect: 0, + }); + + const logGZip = utils.spawn.sync(path.join(__dirname, 'gzip_' + output), [], { + cwd: __dirname, + expect: 0, + }); + + if (logRef !== logNone) { + console.log( + " uncompress pkg doesn't produce same result as running with node" + ); + } + if (logRef !== logGZip) { + console.log( + " GZIP compress pkg doesn't produce same result as running with node" + ); + } + + if (logRef !== logNone || logRef !== logGZip) { + console.log(' Reference:'); + console.log(logRef); + console.log(' Uncompress:'); + console.log(logNone); + console.log(' GZIPed:'); + console.log(logGZip); + + process.exit(1); + } + utils.vacuum.sync(output); + utils.vacuum.sync('gzip_' + output); +} + +const input1 = 'test.js'; + +console.log(' now testing with fs callback'); +runTest(input1); + +console.log(' now testing with fs.promises'); +const input2 = 'test_with_new_fs_promises.js'; +runTest(input2); +console.log('Done'); diff --git a/test/test-12-compression-various-file-access/myfile.txt b/test/test-12-compression-various-file-access/myfile.txt new file mode 100644 index 000000000..09cabb915 --- /dev/null +++ b/test/test-12-compression-various-file-access/myfile.txt @@ -0,0 +1,2 @@ +123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ +123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ \ No newline at end of file diff --git a/test/test-12-compression-various-file-access/myfile.txt1.gz b/test/test-12-compression-various-file-access/myfile.txt1.gz new file mode 100644 index 0000000000000000000000000000000000000000..9e7a2fbdc19f7ce2a93849cfc0ff3574b668223c GIT binary patch literal 75 zcmb2|=HT$s?@eG}&aF(#%t_TNsVHGEHZd|YG&SD7p`@g!tgy8B`-d-|zJ2`qnV*A) fi;t6+yT7ByNM%h+H;1W<1|!4430DPr85kG<3XU5| literal 0 HcmV?d00001 diff --git a/test/test-12-compression-various-file-access/package.json b/test/test-12-compression-various-file-access/package.json new file mode 100644 index 000000000..fb98654f1 --- /dev/null +++ b/test/test-12-compression-various-file-access/package.json @@ -0,0 +1,12 @@ +{ + "name": "test-12-compression-with-asset", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC" +} diff --git a/test/test-12-compression-various-file-access/test.js b/test/test-12-compression-various-file-access/test.js new file mode 100644 index 000000000..392566e87 --- /dev/null +++ b/test/test-12-compression-various-file-access/test.js @@ -0,0 +1,45 @@ +'use strict'; + +const path = require('path'); +const fs = require('fs'); + +const file = path.join(__dirname, 'myfile.txt'); + +function withReadFileSync() { + const wholeFile = fs.readFileSync(file, 'ascii'); + console.log(wholeFile); + console.log('withReadFileSync done !'); +} + +async function withDirectAccess() { 
+ try { + const buffer = Buffer.alloc(1000); + const fd = fs.openSync(file); + fs.readSync(fd, buffer, 0, buffer.length, 10); + fs.closeSync(fd); + console.log(buffer.toString('ascii')); + } catch (err) { + console.log(err.message); + console.log(err); + } + console.log('withDirectAccess ! done!'); +} + +async function withReadStream() { + const stream = fs.createReadStream( + file /* { start: 10, encoding: "ascii" } */ + ); + stream.on('data', (data) => { + console.log(data.toString()); + }); + await new Promise((resolve) => { + stream.on('end', resolve); + }); + console.log('withReadStream done !'); +} + +(async () => { + await withReadFileSync(); + await withReadStream(); + await withDirectAccess(); +})(); diff --git a/test/test-12-compression-various-file-access/test_with_new_fs_promises.js b/test/test-12-compression-various-file-access/test_with_new_fs_promises.js new file mode 100644 index 000000000..5f6d7d002 --- /dev/null +++ b/test/test-12-compression-various-file-access/test_with_new_fs_promises.js @@ -0,0 +1,81 @@ +'use strict'; + +const path = require('path'); + +let fs_promises; +// ignore this test if nodejs <= 10 , as recent version of PNPM do not support nodejs=10 +const MAJOR_VERSION = parseInt(process.version.match(/v([0-9]+)/)[1], 10); +if (MAJOR_VERSION >= 14) { + // only work with nodeJs >= 14.0 + fs_promises = require('fs/promises'); +} else { + fs_promises = require('fs').promises; +} +fs_promises = require('fs').promises; + +// note: this file will be packed in the virtual file system by PKG +const file = path.join(__dirname, 'myfile.txt'); + +async function withPromises() { + // note : the fs.promise API is quite changing between node12/node14 etc... + let fd; + try { + fd = await fs_promises.open(file, 'r'); + // Do something with the file + const buffer = Buffer.alloc(1000); + + if (MAJOR_VERSION >= 14) { + // eslint-disable-line no-unused-vars + const { bytesRead } = await fd.read({ + buffer, + position: 10, + encoding: 'ascii', + }); + if (process.env.DEBUG) { + console.log('bytesRead = ', bytesRead); + } + } else { + await fd.read(buffer, 0, buffer.length, 10); + } + console.log(buffer.toString()); + } catch (err) { + console.log('ERRR =', err.message); + console.log(err); + } finally { + if (fd) { + await fd.close(); + } + } + console.log('withPromises ! done!'); +} + +async function withPromisesReadFile() { + try { + const content = await fs_promises.readFile(file, { encoding: 'ascii' }); + console.log(content); + } catch (err) { + console.log('ERRR =', err.message); + console.log(err); + } + console.log('withPromisesReadFile ! done!'); +} + +async function withPromiseReadDir() { + const thisFile = path.basename(__filename); + try { + const folder = path.join(__dirname, '/'); + const files = await fs_promises.readdir(folder); + console.log( + files.findIndex((x) => x === thisFile) >= 0 ? 'Success' : 'Failure' + ); + } catch (err) { + console.log(err.message); + } + console.log('withPromiseReadDir ! 
done!'); +} +(async () => { + await withPromises(); + await withPromisesReadFile(); + await withPromiseReadDir(); + console.log(42); +})(); diff --git a/test/test-12-compression/.gitignore b/test/test-12-compression/.gitignore new file mode 100644 index 000000000..3be9623e9 --- /dev/null +++ b/test/test-12-compression/.gitignore @@ -0,0 +1 @@ +output-* diff --git a/test/test-12-compression/main.js b/test/test-12-compression/main.js new file mode 100644 index 000000000..80cdbf799 --- /dev/null +++ b/test/test-12-compression/main.js @@ -0,0 +1,111 @@ +#!/usr/bin/env node + +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const assert = require('assert'); +const utils = require('../utils.js'); + +assert(!module.parent); +assert(__dirname === process.cwd()); + +/* eslint-disable no-unused-vars */ +const input = 'test-x.js'; +const target = 'host'; +const ext = process.platform === 'win32' ? '.exe' : ''; +const outputRef = 'output-empty' + ext; +const outputNone = 'output-None' + ext; +const outputGZip = 'output-Brotli' + ext; +const outputBrotli = 'output-GZip' + ext; +const outputBrotliDebug = 'output-debug' + ext; + +const inspect = ['ignore', 'ignore', 'pipe']; + +console.log(' compiling empty '); +const logPkg0 = utils.pkg.sync( + [ + '--target', + target, + '--compress', + 'None', + '--output', + outputRef, + './test-empty.js', + ], + { expect: 0 } +); +const sizeReference = fs.statSync(outputRef).size; + +function pkgCompress(compressMode, output) { + console.log(` compiling compression ${compressMode} `); + const logPkg1 = utils.pkg.sync( + ['--target', target, '--compress', compressMode, '--output', output, input], + { stdio: inspect, expect: 0 } + ); + // check that produced executable is running and produce the expected output. 
+ const log = utils.spawn.sync(path.join(__dirname, output), [], { + cwd: __dirname, + expect: 0, + }); + assert(log === '42\n'); + return fs.statSync(output).size; +} + +const sizeNoneFull = pkgCompress('None', outputNone); +const sizeGZipFull = pkgCompress('GZip', outputGZip); +const sizeBrotliFull = pkgCompress('Brotli', outputBrotli); + +const sizeNone = sizeNoneFull - sizeReference; +const sizeBrotli = sizeBrotliFull - sizeReference; +const sizeGZip = sizeGZipFull - sizeReference; + +console.log(' compiling compression Brotli + debug'); +const logPkg4 = utils.pkg.sync( + [ + '--target', + target, + '--debug', + '--compress', + 'Brotli', + '--output', + outputBrotliDebug, + input, + ], + { expect: 0 } +); + +console.log('node.exe size =', sizeReference); +console.log('virtual file system'); +console.log('No compression = ', sizeNone - sizeReference); +console.log( + ' Δ GZip = ', + sizeGZip - sizeNone, + '(', + (((sizeGZip - sizeNone) / sizeNone) * 100).toFixed(0), + '%)' +); +console.log( + ' Δ Brotli = ', + sizeBrotli - sizeNone, + '(', + (((sizeBrotli - sizeNone) / sizeNone) * 100).toFixed(0), + '%)' +); + +assert(sizeNone > sizeGZip); +assert(sizeGZip > sizeBrotli); + +const logPkg5 = utils.pkg.sync( + ['--target', target, '--compress', 'Crap', '--output', outputBrotli, input], + { expect: 2 } +); + +// xx console.log(logPkg4); +assert(logPkg5.match(/Invalid compression algorithm/g)); + +utils.vacuum.sync(outputRef); +utils.vacuum.sync(outputNone); +utils.vacuum.sync(outputBrotli); +utils.vacuum.sync(outputGZip); +utils.vacuum.sync(outputBrotliDebug); diff --git a/test/test-12-compression/test-empty.js b/test/test-12-compression/test-empty.js new file mode 100644 index 000000000..10df28383 --- /dev/null +++ b/test/test-12-compression/test-empty.js @@ -0,0 +1 @@ +/** empty */ diff --git a/test/test-12-compression/test-x.js b/test/test-12-compression/test-x.js new file mode 100644 index 000000000..1d4708028 --- /dev/null +++ b/test/test-12-compression/test-x.js @@ -0,0 +1,13 @@ +'use strict'; + +/* eslint-disable no-unused-vars */ + +const m = require('minimist'); +const c = require('chalk'); + +const loremIpsum = + 'Unus audio pluribus sibi quibusdam amicitias habere qua satis molestum sapientes molestum est vel frui non pluribus nimias possit quam esse sollicitum adducas persequantur esse audio nihil sollicitum laxissimas enim rerum vel non ad tamquam habitos implicari placuisse quibusdam nihil.'; +const loremIpsum2 = + 'Semper praetorio satisfaceret semper sit militem ut ipse ordinarias ad atque sit ire in ad sit ut more trusus dignitates more compellebatur ultimum praefectus discrimen et in ut tempestate et dignitates impedita convectio in est inopia ad alioqui et ob.'; + +console.log(42 || loremIpsum2 || loremIpsum2); diff --git a/test/test-50-mountpoints/main.js b/test/test-50-mountpoints/main.js index 2c51e92af..b33ba9c95 100644 --- a/test/test-50-mountpoints/main.js +++ b/test/test-50-mountpoints/main.js @@ -23,7 +23,7 @@ fs.writeFileSync( fs.readFileSync('./plugins-D-ext/test-y-require-D.js') ); -utils.pkg.sync(['--target', target, '--output', output, input]); +utils.pkg.sync(['--debug', '--target', target, '--output', output, input]); right = utils.spawn.sync('./' + path.basename(output), [], { cwd: path.dirname(output),
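
A hedged sketch of the round trip this patch implements, with the multistream/meter and virtual-file-system bookkeeping stripped away: at pack time `pipeMayCompressToNewMeter` in `lib/producer.ts` pipes each stripe through a `zlib` compressor, and at run time `payloadFileSync` in `prelude/bootstrap.js` inflates the payload buffer again. The helper names below (`maybeCompress`, `decompress`, `roundTrip`) are illustrative, not pkg internals; only the `GZIP`/`BROTLI` codes mirror the prelude's `DOCOMPRESS` values.

```js
const {
  createGzip,
  createBrotliCompress,
  gunzipSync,
  brotliDecompressSync,
} = require('zlib');
const { Readable } = require('stream');

// Same numeric codes the prelude uses for DOCOMPRESS (CompressType.GZip / Brotli).
const GZIP = 1;
const BROTLI = 2;

// Pack side: wrap the file stream in a compressor before it is metered and
// appended to the payload.
function maybeCompress(stream, doCompress) {
  if (doCompress === GZIP) return stream.pipe(createGzip());
  if (doCompress === BROTLI) return stream.pipe(createBrotliCompress());
  return stream;
}

// Run-time side: inflate the payload buffer read back out of the executable.
function decompress(buffer, doCompress) {
  if (doCompress === GZIP) return gunzipSync(buffer);
  if (doCompress === BROTLI) return brotliDecompressSync(buffer);
  return buffer;
}

async function roundTrip(doCompress) {
  const original = Buffer.from('console.log(42);\n'.repeat(200));
  const chunks = [];
  for await (const chunk of maybeCompress(Readable.from([original]), doCompress)) {
    chunks.push(chunk);
  }
  const stored = Buffer.concat(chunks);
  const restored = decompress(stored, doCompress);
  console.log(doCompress, stored.length, 'bytes stored, intact:', restored.equals(original));
}

roundTrip(GZIP).then(() => roundTrip(BROTLI));
```

Two design choices from the patch are worth noting alongside this sketch. First, virtual-file-system keys are shortened through the `DICT` dictionary: each path segment is replaced by a base-36 token and the tokens are joined with `$`, which keeps the serialized `VIRTUAL_FILESYSTEM` small and platform-neutral. Second, because a raw file descriptor cannot decompress transparently, `fs.openSync`/`fs.open` (and the patched `fs.promises.open`) fall back to inflating the snapshot file into a temporary folder and opening that copy, while `fs.createReadStream` simply wraps the snapshot stream in `createGunzip()` or `createBrotliDecompress()`.
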