diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml
index 6a45b2d692b94e..ecc7f6d2e7dbe3 100644
--- a/.github/workflows/auto-start-ci.yml
+++ b/.github/workflows/auto-start-ci.yml
@@ -55,10 +55,10 @@ jobs:
with:
node-version: ${{ env.NODE_VERSION }}
- - name: Install node-core-utils
- run: npm install -g node-core-utils
+ - name: Install @node-core/utils
+ run: npm install -g @node-core/utils
- - name: Setup node-core-utils
+ - name: Setup @node-core/utils
run: |
ncu-config set username ${{ secrets.JENKINS_USER }}
ncu-config set token "${{ secrets.GH_USER_TOKEN }}"
diff --git a/.github/workflows/close-stale-feature-requests.yml b/.github/workflows/close-stale-feature-requests.yml
index ac792a84e4e410..ca2bd3a0d86caf 100644
--- a/.github/workflows/close-stale-feature-requests.yml
+++ b/.github/workflows/close-stale-feature-requests.yml
@@ -48,7 +48,7 @@ jobs:
close-issue-message: ${{ env.CLOSE_MESSAGE }}
stale-issue-message: ${{ env.WARN_MESSAGE }}
only-labels: feature request
- exempt-pr-labels: never-stale
+ exempt-issue-labels: never-stale
# max requests it will send per run to the GitHub API before it deliberately exits to avoid hitting API rate limits
operations-per-run: 500
remove-stale-when-updated: true
diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml
index 8cf3978c3f23ef..322d483d6fff7a 100644
--- a/.github/workflows/commit-queue.yml
+++ b/.github/workflows/commit-queue.yml
@@ -74,15 +74,15 @@ jobs:
uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1
with:
node-version: ${{ env.NODE_VERSION }}
- - name: Install node-core-utils
- run: npm install -g node-core-utils@latest
+ - name: Install @node-core/utils
+ run: npm install -g @node-core/utils
- name: Set variables
run: |
echo "REPOSITORY=$(echo ${{ github.repository }} | cut -d/ -f2)" >> $GITHUB_ENV
echo "OWNER=${{ github.repository_owner }}" >> $GITHUB_ENV
- - name: Configure node-core-utils
+ - name: Configure @node-core/utils
run: |
ncu-config set branch ${GITHUB_REF_NAME}
ncu-config set upstream origin
diff --git a/.github/workflows/notify-on-push.yml b/.github/workflows/notify-on-push.yml
index e5ae6301a5c397..99b0d4938f32cd 100644
--- a/.github/workflows/notify-on-push.yml
+++ b/.github/workflows/notify-on-push.yml
@@ -14,7 +14,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Slack Notification
- uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
+ uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8 # 2.2.1
env:
SLACK_COLOR: '#DE512A'
SLACK_ICON: https://github.com/nodejs.png?size=48
@@ -56,7 +56,7 @@ jobs:
GH_TOKEN: ${{ github.token }}
- name: Slack Notification
if: ${{ env.INVALID_COMMIT_MESSAGE }}
- uses: rtCamp/action-slack-notify@12e36fc18b0689399306c2e0b3e0f2978b7f1ee7
+ uses: rtCamp/action-slack-notify@b24d75fe0e728a4bf9fc42ee217caa686d141ee8 # 2.2.1
env:
SLACK_COLOR: '#DE512A'
SLACK_ICON: https://github.com/nodejs.png?size=48
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 8ce22207982083..64da158ac15173 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -73,6 +73,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: Upload to code-scanning
- uses: github/codeql-action/upload-sarif@0ba4244466797eb048eb91a6cd43d5c03ca8bd05 # v2.21.2
+ uses: github/codeql-action/upload-sarif@00e563ead9f72a8461b24876bee2d0c2e8bd2ee8 # v2.21.5
with:
sarif_file: results.sarif
diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml
index 880586e2879cbe..187352cc542561 100644
--- a/.github/workflows/tools.yml
+++ b/.github/workflows/tools.yml
@@ -23,6 +23,7 @@ on:
- corepack
- doc
- eslint
+ - github_reporter
- googletest
- histogram
- icu
diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml
index fb123a5b069a72..1a6c87aa528d33 100644
--- a/.github/workflows/update-v8.yml
+++ b/.github/workflows/update-v8.yml
@@ -33,8 +33,8 @@ jobs:
uses: actions/setup-node@5e21ff4d9bc1a8cf6de233a3057d20ec6b3fb69d # v3.8.1
with:
node-version: ${{ env.NODE_VERSION }}
- - name: Install node-core-utils
- run: npm install -g node-core-utils@latest
+ - name: Install @node-core/utils
+ run: npm install -g @node-core/utils
- name: Check and download new V8 version
run: |
./tools/dep_updaters/update-v8-patch.sh > temp-output
diff --git a/.gitpod.yml b/.gitpod.yml
index b674e800f4c015..1a56acf40f1746 100644
--- a/.gitpod.yml
+++ b/.gitpod.yml
@@ -1,7 +1,7 @@
# Ref: https://github.com/gitpod-io/gitpod/issues/6283#issuecomment-1001043454
tasks:
- init: ./configure && timeout 50m make -j16 || true
- - init: pnpm i -g node-core-utils
+ - init: pnpm i -g @node-core/utils
# Ref: https://www.gitpod.io/docs/prebuilds#github-specific-configuration
github:
diff --git a/CHANGELOG.md b/CHANGELOG.md
index b6156ad948fc7e..a0b77c6f5813e4 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,7 +6,7 @@ Select a Node.js version below to view the changelog history:
* [Node.js 19](doc/changelogs/CHANGELOG_V19.md) **Current**
* [Node.js 18](doc/changelogs/CHANGELOG_V18.md) **Long Term Support**
* [Node.js 17](doc/changelogs/CHANGELOG_V17.md) End-of-Life
-* [Node.js 16](doc/changelogs/CHANGELOG_V16.md) **Long Term Support**
+* [Node.js 16](doc/changelogs/CHANGELOG_V16.md) End-of-Life
* [Node.js 15](doc/changelogs/CHANGELOG_V15.md) End-of-Life
* [Node.js 14](doc/changelogs/CHANGELOG_V14.md) End-of-Life
* [Node.js 13](doc/changelogs/CHANGELOG_V13.md) End-of-Life
@@ -36,7 +36,8 @@ release.
-20.7.0
+20.7.1
+20.7.0
20.6.1
20.6.0
20.5.1
diff --git a/Makefile b/Makefile
index b1c267ed5526fe..b7871bf218572c 100644
--- a/Makefile
+++ b/Makefile
@@ -286,7 +286,7 @@ coverage-report-js:
# Runs the C++ tests using the built `cctest` executable.
cctest: all
@out/$(BUILDTYPE)/$@ --gtest_filter=$(GTEST_FILTER)
- @out/$(BUILDTYPE)/embedtest "require('./test/embedding/test-embedding.js')"
+ $(NODE) ./test/embedding/test-embedding.js
.PHONY: list-gtests
list-gtests:
@@ -550,7 +550,7 @@ test-ci: | clear-stalled bench-addons-build build-addons build-js-native-api-tes
$(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \
--mode=$(BUILDTYPE_LOWER) --flaky-tests=$(FLAKY_TESTS) \
$(TEST_CI_ARGS) $(CI_JS_SUITES) $(CI_NATIVE_SUITES) $(CI_DOC)
- out/Release/embedtest 'require("./test/embedding/test-embedding.js")'
+ $(NODE) ./test/embedding/test-embedding.js
$(info Clean up any leftover processes, error if found.)
ps awwx | grep Release/node | grep -v grep | cat
@PS_OUT=`ps awwx | grep Release/node | grep -v grep | awk '{print $$1}'`; \
@@ -1423,6 +1423,7 @@ FORMAT_CPP_FILES += $(LINT_CPP_FILES)
# C source codes.
FORMAT_CPP_FILES += $(wildcard \
benchmark/napi/*/*.c \
+ test/js-native-api/*.h \
test/js-native-api/*/*.c \
test/js-native-api/*/*.h \
test/node-api/*/*.c \
diff --git a/README.md b/README.md
index 76eb5c3c60ec6d..a9eafa7595ef93 100644
--- a/README.md
+++ b/README.md
@@ -721,6 +721,8 @@ maintaining the Node.js project.
**Akhil Marsonya** <> (he/him)
* [meixg](https://github.com/meixg) -
**Xuguang Mei** <> (he/him)
+* [mertcanaltin](https://github.com/mertcanaltin) -
+ **Mert Can Altin** <>
* [Mesteery](https://github.com/Mesteery) -
**Mestery** <> (he/him)
* [preveen-stack](https://github.com/preveen-stack) -
diff --git a/benchmark/error/error-class-reg-exp.js b/benchmark/error/error-class-reg-exp.js
new file mode 100644
index 00000000000000..de8fced9d653f2
--- /dev/null
+++ b/benchmark/error/error-class-reg-exp.js
@@ -0,0 +1,23 @@
+'use strict';
+
+const common = require('../common.js');
+
+const bench = common.createBenchmark(main, {
+ n: [1e5],
+}, {
+ flags: ['--expose-internals'],
+});
+
+const instances = Array.from({ length: 1000 }).map(() => 'Uint8Array');
+
+function main({ n }) {
+ const {
+ codes: {
+ ERR_INVALID_ARG_TYPE,
+ },
+ } = require('internal/errors');
+ bench.start();
+ for (let i = 0; i < n; ++i)
+ new ERR_INVALID_ARG_TYPE('target', instances, 'test');
+ bench.end(n);
+}
diff --git a/benchmark/fs/bench-accessSync.js b/benchmark/fs/bench-accessSync.js
new file mode 100644
index 00000000000000..a80504620580ce
--- /dev/null
+++ b/benchmark/fs/bench-accessSync.js
@@ -0,0 +1,42 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('fs');
+const tmpdir = require('../../test/common/tmpdir');
+tmpdir.refresh();
+
+const tmpfile = tmpdir.resolve(`.existing-file-${process.pid}`);
+fs.writeFileSync(tmpfile, 'this-is-for-a-benchmark', 'utf8');
+
+const bench = common.createBenchmark(main, {
+ type: ['existing', 'non-existing', 'non-flat-existing'],
+ n: [1e5],
+});
+
+function main({ n, type }) {
+ let path;
+
+ switch (type) {
+ case 'existing':
+ path = __filename;
+ break;
+ case 'non-flat-existing':
+ path = tmpfile;
+ break;
+ case 'non-existing':
+ path = tmpdir.resolve(`.non-existing-file-${process.pid}`);
+ break;
+ default:
+      throw new Error('Invalid type');
+ }
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ try {
+ fs.accessSync(path);
+ } catch {
+ // do nothing
+ }
+ }
+ bench.end(n);
+}
diff --git a/benchmark/fs/bench-copyFileSync.js b/benchmark/fs/bench-copyFileSync.js
new file mode 100644
index 00000000000000..af77fbaaaaa004
--- /dev/null
+++ b/benchmark/fs/bench-copyFileSync.js
@@ -0,0 +1,37 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('fs');
+const tmpdir = require('../../test/common/tmpdir');
+tmpdir.refresh();
+
+const bench = common.createBenchmark(main, {
+ type: ['invalid', 'valid'],
+ n: [1e4],
+});
+
+function main({ n, type }) {
+ tmpdir.refresh();
+ const dest = tmpdir.resolve(`copy-file-bench-${process.pid}`);
+ let path;
+
+ switch (type) {
+ case 'invalid':
+ path = tmpdir.resolve(`.existing-file-${process.pid}`);
+ break;
+ case 'valid':
+ path = __filename;
+ break;
+ default:
+ throw new Error('Invalid type');
+ }
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ try {
+ fs.copyFileSync(path, dest);
+ } catch {
+ // do nothing
+ }
+ }
+ bench.end(n);
+}
diff --git a/benchmark/fs/bench-existsSync.js b/benchmark/fs/bench-existsSync.js
new file mode 100644
index 00000000000000..f9da2765b130f7
--- /dev/null
+++ b/benchmark/fs/bench-existsSync.js
@@ -0,0 +1,38 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('fs');
+const tmpdir = require('../../test/common/tmpdir');
+tmpdir.refresh();
+
+const tmpfile = tmpdir.resolve(`.existing-file-${process.pid}`);
+fs.writeFileSync(tmpfile, 'this-is-for-a-benchmark', 'utf8');
+
+const bench = common.createBenchmark(main, {
+ type: ['existing', 'non-existing', 'non-flat-existing'],
+ n: [1e6],
+});
+
+function main({ n, type }) {
+ let path;
+
+ switch (type) {
+ case 'existing':
+ path = __filename;
+ break;
+ case 'non-flat-existing':
+ path = tmpfile;
+ break;
+ case 'non-existing':
+ path = tmpdir.resolve(`.non-existing-file-${process.pid}`);
+ break;
+ default:
+      throw new Error('Invalid type');
+ }
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ fs.existsSync(path);
+ }
+ bench.end(n);
+}
diff --git a/benchmark/fs/bench-openSync.js b/benchmark/fs/bench-openSync.js
new file mode 100644
index 00000000000000..eaa56139dcbf3c
--- /dev/null
+++ b/benchmark/fs/bench-openSync.js
@@ -0,0 +1,37 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('fs');
+const tmpdir = require('../../test/common/tmpdir');
+tmpdir.refresh();
+
+const bench = common.createBenchmark(main, {
+ type: ['existing', 'non-existing'],
+ n: [1e5],
+});
+
+function main({ n, type }) {
+ let path;
+
+ switch (type) {
+ case 'existing':
+ path = __filename;
+ break;
+ case 'non-existing':
+ path = tmpdir.resolve(`.non-existing-file-${process.pid}`);
+ break;
+ default:
+      throw new Error('Invalid type');
+ }
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ try {
+ const fd = fs.openSync(path, 'r', 0o666);
+ fs.closeSync(fd);
+ } catch {
+ // do nothing
+ }
+ }
+ bench.end(n);
+}
diff --git a/benchmark/fs/bench-opendirSync.js b/benchmark/fs/bench-opendirSync.js
new file mode 100644
index 00000000000000..206822db139ff7
--- /dev/null
+++ b/benchmark/fs/bench-opendirSync.js
@@ -0,0 +1,43 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('fs');
+const path = require('path');
+const tmpdir = require('../../test/common/tmpdir');
+tmpdir.refresh();
+
+const testFiles = fs.readdirSync('test', { withFileTypes: true })
+ .filter((f) => f.isDirectory())
+ .map((f) => path.join(f.path, f.name));
+const bench = common.createBenchmark(main, {
+ type: ['existing', 'non-existing'],
+ n: [1e3],
+});
+
+function main({ n, type }) {
+ let files;
+
+ switch (type) {
+ case 'existing':
+ files = testFiles;
+ break;
+ case 'non-existing':
+ files = [tmpdir.resolve(`.non-existing-file-${Date.now()}`)];
+ break;
+ default:
+      throw new Error('Invalid type');
+ }
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ for (let j = 0; j < files.length; j++) {
+ try {
+ const dir = fs.opendirSync(files[j]);
+ dir.closeSync();
+ } catch {
+ // do nothing
+ }
+ }
+ }
+ bench.end(n);
+}
diff --git a/benchmark/fs/bench-unlinkSync.js b/benchmark/fs/bench-unlinkSync.js
new file mode 100644
index 00000000000000..8b992198c8d368
--- /dev/null
+++ b/benchmark/fs/bench-unlinkSync.js
@@ -0,0 +1,43 @@
+'use strict';
+
+const common = require('../common');
+const fs = require('fs');
+const tmpdir = require('../../test/common/tmpdir');
+tmpdir.refresh();
+
+const bench = common.createBenchmark(main, {
+ type: ['existing', 'non-existing'],
+ n: [1e3],
+});
+
+function main({ n, type }) {
+ let files;
+
+ switch (type) {
+ case 'existing':
+ files = [];
+
+ // Populate tmpdir with mock files
+ for (let i = 0; i < n; i++) {
+ const path = tmpdir.resolve(`unlinksync-bench-file-${i}`);
+ fs.writeFileSync(path, 'bench');
+ files.push(path);
+ }
+ break;
+ case 'non-existing':
+ files = new Array(n).fill(tmpdir.resolve(`.non-existing-file-${Date.now()}`));
+ break;
+ default:
+      throw new Error('Invalid type');
+ }
+
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ try {
+ fs.unlinkSync(files[i]);
+ } catch {
+ // do nothing
+ }
+ }
+ bench.end(n);
+}
diff --git a/benchmark/fs/readFileSync.js b/benchmark/fs/readFileSync.js
index b81bdce8f27f69..800ab31450f43a 100644
--- a/benchmark/fs/readFileSync.js
+++ b/benchmark/fs/readFileSync.js
@@ -6,12 +6,21 @@ const fs = require('fs');
const bench = common.createBenchmark(main, {
encoding: ['undefined', 'utf8'],
path: ['existing', 'non-existing'],
- n: [60e1],
+ hasFileDescriptor: ['true', 'false'],
+ n: [1e4],
});
-function main({ n, encoding, path }) {
+function main({ n, encoding, path, hasFileDescriptor }) {
const enc = encoding === 'undefined' ? undefined : encoding;
- const file = path === 'existing' ? __filename : '/tmp/not-found';
+ let file;
+ let shouldClose = false;
+
+ if (hasFileDescriptor === 'true') {
+ shouldClose = path === 'existing';
+ file = path === 'existing' ? fs.openSync(__filename) : -1;
+ } else {
+ file = path === 'existing' ? __filename : '/tmp/not-found';
+ }
bench.start();
for (let i = 0; i < n; ++i) {
try {
@@ -21,4 +30,7 @@ function main({ n, encoding, path }) {
}
}
bench.end(n);
+ if (shouldClose) {
+ fs.closeSync(file);
+ }
}
diff --git a/benchmark/perf_hooks/timerfied.js b/benchmark/perf_hooks/timerfied.js
new file mode 100644
index 00000000000000..50be0a47fc1b5a
--- /dev/null
+++ b/benchmark/perf_hooks/timerfied.js
@@ -0,0 +1,36 @@
+'use strict';
+
+const assert = require('assert');
+const common = require('../common.js');
+
+const {
+ PerformanceObserver,
+ performance,
+} = require('perf_hooks');
+
+function randomFn() {
+ return Math.random();
+}
+
+const bench = common.createBenchmark(main, {
+ n: [1e5],
+ observe: ['function'],
+});
+
+let _result;
+
+function main({ n, observe }) {
+ const obs = new PerformanceObserver(() => {
+ bench.end(n);
+ });
+ obs.observe({ entryTypes: [observe], buffered: true });
+
+ const timerfied = performance.timerify(randomFn);
+
+ bench.start();
+ for (let i = 0; i < n; i++)
+ _result = timerfied();
+
+ // Avoid V8 deadcode (elimination)
+ assert.ok(_result);
+}
diff --git a/benchmark/readline/readline-iterable.js b/benchmark/readline/readline-iterable.js
index e92e5d005ea1d0..9be4ea1f52b49c 100644
--- a/benchmark/readline/readline-iterable.js
+++ b/benchmark/readline/readline-iterable.js
@@ -5,6 +5,7 @@ const { Readable } = require('stream');
const bench = common.createBenchmark(main, {
n: [1e1, 1e2, 1e3, 1e4, 1e5, 1e6],
+ type: ['old', 'new'],
});
const loremIpsum = `Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed
@@ -21,6 +22,37 @@ Condimentum mattis pellentesque id nibh tortor id aliquet lectus proin.
Diam in arcu cursus euismod quis viverra nibh.
Rest of line`;
+function oldWay() {
+ const readable = new Readable({
+ objectMode: true,
+ read: () => {
+ this.resume();
+ },
+ destroy: (err, cb) => {
+ this.off('line', lineListener);
+ this.off('close', closeListener);
+ this.close();
+ cb(err);
+ },
+ });
+ const lineListener = (input) => {
+ if (!readable.push(input)) {
+ // TODO(rexagod): drain to resume flow
+ this.pause();
+ }
+ };
+ const closeListener = () => {
+ readable.push(null);
+ };
+ const errorListener = (err) => {
+ readable.destroy(err);
+ };
+ this.on('error', errorListener);
+ this.on('line', lineListener);
+ this.on('close', closeListener);
+ return readable[Symbol.asyncIterator]();
+}
+
function getLoremIpsumStream(repetitions) {
const readable = Readable({
objectMode: true,
@@ -32,7 +64,7 @@ function getLoremIpsumStream(repetitions) {
return readable;
}
-async function main({ n }) {
+async function main({ n, type }) {
bench.start();
let lineCount = 0;
@@ -40,8 +72,10 @@ async function main({ n }) {
input: getLoremIpsumStream(n),
});
+ const readlineIterable = type === 'old' ? oldWay.call(iterable) : iterable;
+
// eslint-disable-next-line no-unused-vars
- for await (const _ of iterable) {
+ for await (const _ of readlineIterable) {
lineCount++;
}
bench.end(lineCount);
diff --git a/benchmark/url/whatwg-url-validity.js b/benchmark/url/whatwg-url-validity.js
new file mode 100644
index 00000000000000..6ba22336408fa1
--- /dev/null
+++ b/benchmark/url/whatwg-url-validity.js
@@ -0,0 +1,23 @@
+'use strict';
+const common = require('../common.js');
+const url = require('url');
+const URL = url.URL;
+
+const bench = common.createBenchmark(main, {
+ type: ['valid', 'invalid'],
+ e: [1e5],
+});
+
+// This benchmark is used to compare the `Invalid URL` path of the URL parser
+function main({ type, e }) {
+ const url = type === 'valid' ? 'https://www.nodejs.org' : 'www.nodejs.org';
+ bench.start();
+ for (let i = 0; i < e; i++) {
+ try {
+ new URL(url);
+ } catch {
+ // do nothing
+ }
+ }
+ bench.end(e);
+}
diff --git a/benchmark/webstreams/pipe-to.js b/benchmark/webstreams/pipe-to.js
index 2b765bf2051569..38324cd20822f1 100644
--- a/benchmark/webstreams/pipe-to.js
+++ b/benchmark/webstreams/pipe-to.js
@@ -6,7 +6,7 @@ const {
} = require('node:stream/web');
const bench = common.createBenchmark(main, {
- n: [5e6],
+ n: [5e5],
highWaterMarkR: [512, 1024, 2048, 4096],
highWaterMarkW: [512, 1024, 2048, 4096],
});
@@ -18,7 +18,7 @@ async function main({ n, highWaterMarkR, highWaterMarkW }) {
const rs = new ReadableStream({
highWaterMark: highWaterMarkR,
pull: function(controller) {
- if (i++ === n) {
+ if (i++ < n) {
controller.enqueue(b);
} else {
controller.close();
diff --git a/benchmark/webstreams/readable-read.js b/benchmark/webstreams/readable-read.js
new file mode 100644
index 00000000000000..d3d28dd7f50301
--- /dev/null
+++ b/benchmark/webstreams/readable-read.js
@@ -0,0 +1,49 @@
+'use strict';
+const common = require('../common.js');
+const { ReadableStream } = require('node:stream/web');
+
+const bench = common.createBenchmark(main, {
+ n: [1e5],
+ type: ['normal', 'byob'],
+});
+
+async function main({ n, type }) {
+ switch (type) {
+ case 'normal': {
+ const rs = new ReadableStream({
+ pull: function(controller) {
+ controller.enqueue('a');
+ },
+ });
+ const reader = rs.getReader();
+ let x = null;
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ const { value } = await reader.read();
+ x = value;
+ }
+ bench.end(n);
+ console.assert(x);
+ break;
+ }
+ case 'byob': {
+ const encode = new TextEncoder();
+ const rs = new ReadableStream({
+ type: 'bytes',
+ pull: function(controller) {
+ controller.enqueue(encode.encode('a'));
+ },
+ });
+ const reader = rs.getReader({ mode: 'byob' });
+ let x = null;
+ bench.start();
+ for (let i = 0; i < n; i++) {
+ const { value } = await reader.read(new Uint8Array(1));
+ x = value;
+ }
+ bench.end(n);
+ console.assert(x);
+ break;
+ }
+ }
+}
diff --git a/common.gypi b/common.gypi
index c6d968c5e7447d..d783c7f970237a 100644
--- a/common.gypi
+++ b/common.gypi
@@ -36,7 +36,7 @@
# Reset this number to 0 on major V8 upgrades.
# Increment by one for each non-official patch applied to deps/v8.
- 'v8_embedder_string': '-node.15',
+ 'v8_embedder_string': '-node.16',
##### V8 defaults for Node.js #####
diff --git a/configure.py b/configure.py
index 2cb81f200c7194..62f041ce52bf85 100755
--- a/configure.py
+++ b/configure.py
@@ -609,6 +609,14 @@
default=None,
help='Use Link Time Code Generation. This feature is only available on Windows.')
+parser.add_argument('--write-snapshot-as-array-literals',
+ action='store_true',
+ dest='write_snapshot_as_array_literals',
+ default=None,
+ help='Write the snapshot data as array literals for readability.'
+ 'By default the snapshot data may be written as string literals on some '
+ 'platforms to speed up compilation.')
+
parser.add_argument('--without-node-snapshot',
action='store_true',
dest='without_node_snapshot',
@@ -1290,6 +1298,11 @@ def configure_node(o):
o['variables']['node_use_node_code_cache'] = b(
not cross_compiling and not options.shared)
+ if options.write_snapshot_as_array_literals is not None:
+ o['variables']['node_write_snapshot_as_array_literals'] = b(options.write_snapshot_as_array_literals)
+ else:
+ o['variables']['node_write_snapshot_as_array_literals'] = b(flavor != 'mac' and flavor != 'linux')
+
if target_arch == 'arm':
configure_arm(o)
elif target_arch in ('mips', 'mipsel', 'mips64el'):
diff --git a/deps/corepack/CHANGELOG.md b/deps/corepack/CHANGELOG.md
index 817125958e1f44..45b5d182594b6e 100644
--- a/deps/corepack/CHANGELOG.md
+++ b/deps/corepack/CHANGELOG.md
@@ -1,5 +1,13 @@
# Changelog
+## [0.20.0](https://github.com/nodejs/corepack/compare/v0.19.0...v0.20.0) (2023-08-29)
+
+
+### Features
+
+* refactor the CLI interface ([#291](https://github.com/nodejs/corepack/issues/291)) ([fe3e5cd](https://github.com/nodejs/corepack/commit/fe3e5cd86c45db0d87c7fdea87d57d59b0bdcb78))
+* update package manager versions ([#292](https://github.com/nodejs/corepack/issues/292)) ([be9c286](https://github.com/nodejs/corepack/commit/be9c286846443ff03081e736fdf4a0ff031fbd38))
+
## [0.19.0](https://github.com/nodejs/corepack/compare/v0.18.1...v0.19.0) (2023-06-24)
diff --git a/deps/corepack/README.md b/deps/corepack/README.md
index 683f539ec00019..a83114e48594c2 100644
--- a/deps/corepack/README.md
+++ b/deps/corepack/README.md
@@ -92,15 +92,14 @@ If there is no Known Good Release for the requested package manager, Corepack
looks up the npm registry for the latest available version and cache it for
future use.
-The Known Good Releases can be updated system-wide using the `--activate` flag
-from the `corepack prepare` and `corepack hydrate` commands.
+The Known Good Releases can be updated system-wide using `corepack install -g`.
## Offline Workflow
The utility commands detailed in the next section.
- Either you can use the network while building your container image, in which
- case you'll simply run `corepack prepare` to make sure that your image
+ case you'll simply run `corepack pack` to make sure that your image
includes the Last Known Good release for the specified package manager.
- If you want to have _all_ Last Known Good releases for all package managers,
@@ -108,10 +107,10 @@ The utility commands detailed in the next section.
- Or you're publishing your project to a system where the network is
unavailable, in which case you'll preemptively generate a package manager
- archive from your local computer (using `corepack prepare -o`) before storing
+ archive from your local computer (using `corepack pack -o`) before storing
it somewhere your container will be able to access (for example within your
repository). After that it'll just be a matter of running
- `corepack hydrate ` to setup the cache.
+ `corepack install -g --cache-only ` to setup the cache.
## Utility Commands
@@ -171,29 +170,52 @@ echo "function npx { corepack npx `$args }" >> $PROFILE
This command will detect where Node.js is installed and will remove the shims
from there.
-### `corepack prepare [... name@version]`
+### `corepack install`
-| Option | Description |
-| ------------- | ----------------------------------------------------------------------- |
-| `--all` | Prepare the "Last Known Good" version of all supported package managers |
-| `-o,--output` | Also generate an archive containing the package managers |
-| `--activate` | Also update the "Last Known Good" release |
+Download and install the package manager configured in the local project.
+This command doesn't change the global version used when running the package
+manager from outside the project (use the \`-g,--global\` flag if you wish
+to do this).
-This command will download the given package managers (or the one configured for
-the local project if no argument is passed in parameter) and store it within the
-Corepack cache. If the `-o,--output` flag is set (optionally with a path as
-parameter), an archive will also be generated that can be used by the
-`corepack hydrate` command.
+### `corepack install <-g,--global> [--all] [... name@version]`
-### `corepack hydrate `
+| Option | Description |
+| --------------------- | ------------------------------------------ |
+| `--all` | Install all Last Known Good releases |
+
+Install the selected package managers and install them on the system.
+
+Package managers thus installed will be configured as the new default when
+calling their respective binaries outside of projects defining the
+`packageManager` field.
+
+### `corepack pack [--all] [... name@version]`
+
+| Option | Description |
+| --------------------- | ------------------------------------------ |
+| `--all` | Pack all Last Known Good releases |
+| `--json ` | Print the output folder rather than logs |
+| `-o,--output ` | Path where to generate the archive |
+
+Download the selected package managers and store them inside a tarball
+suitable for use with `corepack install -g`.
+
+### `corepack use `
+
+When run, this command will retrieve the latest release matching the provided
+descriptor, assign it to the project's package.json file, and automatically
+perform an install.
+
+### `corepack up`
-| Option | Description |
-| ------------ | ----------------------------------------- |
-| `--activate` | Also update the "Last Known Good" release |
+Retrieve the latest available version for the current major release line of
+the package manager used in the local project, and update the project to use
+it.
-This command will retrieve the given package manager from the specified archive
-and will install it within the Corepack cache, ready to be used without further
-network interaction.
+Unlike `corepack use` this command doesn't take a package manager name nor a
+version range, as it will always select the latest available version from the
+same major line. Should you need to upgrade to a new major, use an explicit
+`corepack use {name}@latest` call.
## Environment Variables
@@ -204,7 +226,7 @@ network interaction.
- `COREPACK_ENABLE_NETWORK` can be set to `0` to prevent Corepack from accessing
the network (in which case you'll be responsible for hydrating the package
manager versions that will be required for the projects you'll run, using
- `corepack hydrate`).
+ `corepack install -g --cache-only`).
- `COREPACK_ENABLE_STRICT` can be set to `0` to prevent Corepack from throwing
error if the package manager does not correspond to the one defined for the
diff --git a/deps/corepack/dist/lib/corepack.cjs b/deps/corepack/dist/lib/corepack.cjs
index 22bd4edf5840d0..69e50a97cccec4 100644
--- a/deps/corepack/dist/lib/corepack.cjs
+++ b/deps/corepack/dist/lib/corepack.cjs
@@ -1010,14 +1010,15 @@ var init_lib = __esm({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/constants.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/constants.js
var require_constants = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/constants.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/constants.js"(exports, module2) {
var SEMVER_SPEC_VERSION = "2.0.0";
var MAX_LENGTH = 256;
var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */
9007199254740991;
var MAX_SAFE_COMPONENT_LENGTH = 16;
+ var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6;
var RELEASE_TYPES = [
"major",
"premajor",
@@ -1030,6 +1031,7 @@ var require_constants = __commonJS({
module2.exports = {
MAX_LENGTH,
MAX_SAFE_COMPONENT_LENGTH,
+ MAX_SAFE_BUILD_LENGTH,
MAX_SAFE_INTEGER,
RELEASE_TYPES,
SEMVER_SPEC_VERSION,
@@ -1039,42 +1041,57 @@ var require_constants = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/debug.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/debug.js
var require_debug = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/debug.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/debug.js"(exports, module2) {
var debug2 = typeof process === "object" && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG) ? (...args) => console.error("SEMVER", ...args) : () => {
};
module2.exports = debug2;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/re.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/re.js
var require_re = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/re.js"(exports, module2) {
- var { MAX_SAFE_COMPONENT_LENGTH } = require_constants();
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/re.js"(exports, module2) {
+ var { MAX_SAFE_COMPONENT_LENGTH, MAX_SAFE_BUILD_LENGTH } = require_constants();
var debug2 = require_debug();
exports = module2.exports = {};
var re = exports.re = [];
+ var safeRe = exports.safeRe = [];
var src = exports.src = [];
var t = exports.t = {};
var R = 0;
+ var LETTERDASHNUMBER = "[a-zA-Z0-9-]";
+ var safeRegexReplacements = [
+ ["\\s", 1],
+ ["\\d", MAX_SAFE_COMPONENT_LENGTH],
+ [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH]
+ ];
+ var makeSafeRegex = (value) => {
+ for (const [token, max] of safeRegexReplacements) {
+ value = value.split(`${token}*`).join(`${token}{0,${max}}`).split(`${token}+`).join(`${token}{1,${max}}`);
+ }
+ return value;
+ };
var createToken = (name, value, isGlobal) => {
+ const safe = makeSafeRegex(value);
const index = R++;
debug2(name, index, value);
t[name] = index;
src[index] = value;
re[index] = new RegExp(value, isGlobal ? "g" : void 0);
+ safeRe[index] = new RegExp(safe, isGlobal ? "g" : void 0);
};
createToken("NUMERICIDENTIFIER", "0|[1-9]\\d*");
- createToken("NUMERICIDENTIFIERLOOSE", "[0-9]+");
- createToken("NONNUMERICIDENTIFIER", "\\d*[a-zA-Z-][a-zA-Z0-9-]*");
+ createToken("NUMERICIDENTIFIERLOOSE", "\\d+");
+ createToken("NONNUMERICIDENTIFIER", `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`);
createToken("MAINVERSION", `(${src[t.NUMERICIDENTIFIER]})\\.(${src[t.NUMERICIDENTIFIER]})\\.(${src[t.NUMERICIDENTIFIER]})`);
createToken("MAINVERSIONLOOSE", `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.(${src[t.NUMERICIDENTIFIERLOOSE]})\\.(${src[t.NUMERICIDENTIFIERLOOSE]})`);
createToken("PRERELEASEIDENTIFIER", `(?:${src[t.NUMERICIDENTIFIER]}|${src[t.NONNUMERICIDENTIFIER]})`);
createToken("PRERELEASEIDENTIFIERLOOSE", `(?:${src[t.NUMERICIDENTIFIERLOOSE]}|${src[t.NONNUMERICIDENTIFIER]})`);
createToken("PRERELEASE", `(?:-(${src[t.PRERELEASEIDENTIFIER]}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`);
createToken("PRERELEASELOOSE", `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE]}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`);
- createToken("BUILDIDENTIFIER", "[0-9A-Za-z-]+");
+ createToken("BUILDIDENTIFIER", `${LETTERDASHNUMBER}+`);
createToken("BUILD", `(?:\\+(${src[t.BUILDIDENTIFIER]}(?:\\.${src[t.BUILDIDENTIFIER]})*))`);
createToken("FULLPLAIN", `v?${src[t.MAINVERSION]}${src[t.PRERELEASE]}?${src[t.BUILD]}?`);
createToken("FULL", `^${src[t.FULLPLAIN]}$`);
@@ -1111,9 +1128,9 @@ var require_re = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/parse-options.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/parse-options.js
var require_parse_options = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/parse-options.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/parse-options.js"(exports, module2) {
var looseOption = Object.freeze({ loose: true });
var emptyOpts = Object.freeze({});
var parseOptions = (options) => {
@@ -1129,9 +1146,9 @@ var require_parse_options = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/identifiers.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/identifiers.js
var require_identifiers = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/internal/identifiers.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/internal/identifiers.js"(exports, module2) {
var numeric = /^[0-9]+$/;
var compareIdentifiers = (a, b) => {
const anum = numeric.test(a);
@@ -1150,12 +1167,12 @@ var require_identifiers = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/classes/semver.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/classes/semver.js
var require_semver = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/classes/semver.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/classes/semver.js"(exports, module2) {
var debug2 = require_debug();
var { MAX_LENGTH, MAX_SAFE_INTEGER } = require_constants();
- var { re, t } = require_re();
+ var { safeRe: re, t } = require_re();
var parseOptions = require_parse_options();
var { compareIdentifiers } = require_identifiers();
var SemVer = class {
@@ -1381,8 +1398,10 @@ var require_semver = __commonJS({
default:
throw new Error(`invalid increment argument: ${release}`);
}
- this.format();
- this.raw = this.version;
+ this.raw = this.format();
+ if (this.build.length) {
+ this.raw += `+${this.build.join(".")}`;
+ }
return this;
}
};
@@ -1390,9 +1409,9 @@ var require_semver = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/parse.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/parse.js
var require_parse = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/parse.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/parse.js"(exports, module2) {
var SemVer = require_semver();
var parse = (version2, options, throwErrors = false) => {
if (version2 instanceof SemVer) {
@@ -1411,9 +1430,9 @@ var require_parse = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/valid.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/valid.js
var require_valid = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/valid.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/valid.js"(exports, module2) {
var parse = require_parse();
var valid = (version2, options) => {
const v = parse(version2, options);
@@ -1423,9 +1442,9 @@ var require_valid = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/clean.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/clean.js
var require_clean = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/clean.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/clean.js"(exports, module2) {
var parse = require_parse();
var clean = (version2, options) => {
const s = parse(version2.trim().replace(/^[=v]+/, ""), options);
@@ -1435,9 +1454,9 @@ var require_clean = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/inc.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/inc.js
var require_inc = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/inc.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/inc.js"(exports, module2) {
var SemVer = require_semver();
var inc = (version2, release, options, identifier, identifierBase) => {
if (typeof options === "string") {
@@ -1458,9 +1477,9 @@ var require_inc = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/diff.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/diff.js
var require_diff = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/diff.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/diff.js"(exports, module2) {
var parse = require_parse();
var diff = (version1, version2) => {
const v1 = parse(version1, null, true);
@@ -1473,6 +1492,19 @@ var require_diff = __commonJS({
const highVersion = v1Higher ? v1 : v2;
const lowVersion = v1Higher ? v2 : v1;
const highHasPre = !!highVersion.prerelease.length;
+ const lowHasPre = !!lowVersion.prerelease.length;
+ if (lowHasPre && !highHasPre) {
+ if (!lowVersion.patch && !lowVersion.minor) {
+ return "major";
+ }
+ if (highVersion.patch) {
+ return "patch";
+ }
+ if (highVersion.minor) {
+ return "minor";
+ }
+ return "major";
+ }
const prefix = highHasPre ? "pre" : "";
if (v1.major !== v2.major) {
return prefix + "major";
@@ -1483,51 +1515,42 @@ var require_diff = __commonJS({
if (v1.patch !== v2.patch) {
return prefix + "patch";
}
- if (highHasPre) {
- return "prerelease";
- }
- if (lowVersion.patch) {
- return "patch";
- }
- if (lowVersion.minor) {
- return "minor";
- }
- return "major";
+ return "prerelease";
};
module2.exports = diff;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/major.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/major.js
var require_major = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/major.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/major.js"(exports, module2) {
var SemVer = require_semver();
var major = (a, loose) => new SemVer(a, loose).major;
module2.exports = major;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/minor.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/minor.js
var require_minor = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/minor.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/minor.js"(exports, module2) {
var SemVer = require_semver();
var minor = (a, loose) => new SemVer(a, loose).minor;
module2.exports = minor;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/patch.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/patch.js
var require_patch = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/patch.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/patch.js"(exports, module2) {
var SemVer = require_semver();
var patch = (a, loose) => new SemVer(a, loose).patch;
module2.exports = patch;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/prerelease.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/prerelease.js
var require_prerelease = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/prerelease.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/prerelease.js"(exports, module2) {
var parse = require_parse();
var prerelease = (version2, options) => {
const parsed = parse(version2, options);
@@ -1537,36 +1560,36 @@ var require_prerelease = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/compare.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/compare.js
var require_compare = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/compare.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/compare.js"(exports, module2) {
var SemVer = require_semver();
var compare = (a, b, loose) => new SemVer(a, loose).compare(new SemVer(b, loose));
module2.exports = compare;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/rcompare.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/rcompare.js
var require_rcompare = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/rcompare.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/rcompare.js"(exports, module2) {
var compare = require_compare();
var rcompare = (a, b, loose) => compare(b, a, loose);
module2.exports = rcompare;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/compare-loose.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/compare-loose.js
var require_compare_loose = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/compare-loose.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/compare-loose.js"(exports, module2) {
var compare = require_compare();
var compareLoose = (a, b) => compare(a, b, true);
module2.exports = compareLoose;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/compare-build.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/compare-build.js
var require_compare_build = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/compare-build.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/compare-build.js"(exports, module2) {
var SemVer = require_semver();
var compareBuild = (a, b, loose) => {
const versionA = new SemVer(a, loose);
@@ -1577,81 +1600,81 @@ var require_compare_build = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/sort.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/sort.js
var require_sort = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/sort.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/sort.js"(exports, module2) {
var compareBuild = require_compare_build();
var sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose));
module2.exports = sort;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/rsort.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/rsort.js
var require_rsort = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/rsort.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/rsort.js"(exports, module2) {
var compareBuild = require_compare_build();
var rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose));
module2.exports = rsort;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/gt.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/gt.js
var require_gt = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/gt.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/gt.js"(exports, module2) {
var compare = require_compare();
var gt = (a, b, loose) => compare(a, b, loose) > 0;
module2.exports = gt;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/lt.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/lt.js
var require_lt = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/lt.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/lt.js"(exports, module2) {
var compare = require_compare();
var lt = (a, b, loose) => compare(a, b, loose) < 0;
module2.exports = lt;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/eq.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/eq.js
var require_eq = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/eq.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/eq.js"(exports, module2) {
var compare = require_compare();
var eq = (a, b, loose) => compare(a, b, loose) === 0;
module2.exports = eq;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/neq.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/neq.js
var require_neq = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/neq.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/neq.js"(exports, module2) {
var compare = require_compare();
var neq = (a, b, loose) => compare(a, b, loose) !== 0;
module2.exports = neq;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/gte.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/gte.js
var require_gte = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/gte.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/gte.js"(exports, module2) {
var compare = require_compare();
var gte = (a, b, loose) => compare(a, b, loose) >= 0;
module2.exports = gte;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/lte.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/lte.js
var require_lte = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/lte.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/lte.js"(exports, module2) {
var compare = require_compare();
var lte = (a, b, loose) => compare(a, b, loose) <= 0;
module2.exports = lte;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/cmp.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/cmp.js
var require_cmp = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/cmp.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/cmp.js"(exports, module2) {
var eq = require_eq();
var neq = require_neq();
var gt = require_gt();
@@ -1698,12 +1721,12 @@ var require_cmp = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/coerce.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/coerce.js
var require_coerce = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/coerce.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/coerce.js"(exports, module2) {
var SemVer = require_semver();
var parse = require_parse();
- var { re, t } = require_re();
+ var { safeRe: re, t } = require_re();
var coerce = (version2, options) => {
if (version2 instanceof SemVer) {
return version2;
@@ -2390,9 +2413,9 @@ var require_lru_cache = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/classes/range.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/classes/range.js
var require_range = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/classes/range.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/classes/range.js"(exports, module2) {
var Range = class {
constructor(range, options) {
options = parseOptions(options);
@@ -2412,10 +2435,10 @@ var require_range = __commonJS({
this.options = options;
this.loose = !!options.loose;
this.includePrerelease = !!options.includePrerelease;
- this.raw = range;
- this.set = range.split("||").map((r) => this.parseRange(r.trim())).filter((c) => c.length);
+ this.raw = range.trim().split(/\s+/).join(" ");
+ this.set = this.raw.split("||").map((r) => this.parseRange(r)).filter((c) => c.length);
if (!this.set.length) {
- throw new TypeError(`Invalid SemVer Range: ${range}`);
+ throw new TypeError(`Invalid SemVer Range: ${this.raw}`);
}
if (this.set.length > 1) {
const first = this.set[0];
@@ -2434,16 +2457,13 @@ var require_range = __commonJS({
this.format();
}
format() {
- this.range = this.set.map((comps) => {
- return comps.join(" ").trim();
- }).join("||").trim();
+ this.range = this.set.map((comps) => comps.join(" ").trim()).join("||").trim();
return this.range;
}
toString() {
return this.range;
}
parseRange(range) {
- range = range.trim();
const memoOpts = (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | (this.options.loose && FLAG_LOOSE);
const memoKey = memoOpts + ":" + range;
const cached = cache.get(memoKey);
@@ -2457,8 +2477,9 @@ var require_range = __commonJS({
range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace);
debug2("comparator trim", range);
range = range.replace(re[t.TILDETRIM], tildeTrimReplace);
+ debug2("tilde trim", range);
range = range.replace(re[t.CARETTRIM], caretTrimReplace);
- range = range.split(/\s+/).join(" ");
+ debug2("caret trim", range);
let rangeList = range.split(" ").map((comp) => parseComparator(comp, this.options)).join(" ").split(/\s+/).map((comp) => replaceGTE0(comp, this.options));
if (loose) {
rangeList = rangeList.filter((comp) => {
@@ -2524,7 +2545,7 @@ var require_range = __commonJS({
var debug2 = require_debug();
var SemVer = require_semver();
var {
- re,
+ safeRe: re,
t,
comparatorTrimReplace,
tildeTrimReplace,
@@ -2558,9 +2579,9 @@ var require_range = __commonJS({
return comp;
};
var isX = (id) => !id || id.toLowerCase() === "x" || id === "*";
- var replaceTildes = (comp, options) => comp.trim().split(/\s+/).map((c) => {
- return replaceTilde(c, options);
- }).join(" ");
+ var replaceTildes = (comp, options) => {
+ return comp.trim().split(/\s+/).map((c) => replaceTilde(c, options)).join(" ");
+ };
var replaceTilde = (comp, options) => {
const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE];
return comp.replace(r, (_, M, m, p, pr) => {
@@ -2582,9 +2603,9 @@ var require_range = __commonJS({
return ret;
});
};
- var replaceCarets = (comp, options) => comp.trim().split(/\s+/).map((c) => {
- return replaceCaret(c, options);
- }).join(" ");
+ var replaceCarets = (comp, options) => {
+ return comp.trim().split(/\s+/).map((c) => replaceCaret(c, options)).join(" ");
+ };
var replaceCaret = (comp, options) => {
debug2("caret", comp, options);
const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET];
@@ -2631,9 +2652,7 @@ var require_range = __commonJS({
};
var replaceXRanges = (comp, options) => {
debug2("replaceXRanges", comp, options);
- return comp.split(/\s+/).map((c) => {
- return replaceXRange(c, options);
- }).join(" ");
+ return comp.split(/\s+/).map((c) => replaceXRange(c, options)).join(" ");
};
var replaceXRange = (comp, options) => {
comp = comp.trim();
@@ -2751,9 +2770,9 @@ var require_range = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/classes/comparator.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/classes/comparator.js
var require_comparator = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/classes/comparator.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/classes/comparator.js"(exports, module2) {
var ANY = Symbol("SemVer ANY");
var Comparator = class {
static get ANY() {
@@ -2768,6 +2787,7 @@ var require_comparator = __commonJS({
comp = comp.value;
}
}
+ comp = comp.trim().split(/\s+/).join(" ");
debug2("comparator", comp, options);
this.options = options;
this.loose = !!options.loose;
@@ -2854,7 +2874,7 @@ var require_comparator = __commonJS({
};
module2.exports = Comparator;
var parseOptions = require_parse_options();
- var { re, t } = require_re();
+ var { safeRe: re, t } = require_re();
var cmp = require_cmp();
var debug2 = require_debug();
var SemVer = require_semver();
@@ -2862,9 +2882,9 @@ var require_comparator = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/satisfies.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/satisfies.js
var require_satisfies = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/functions/satisfies.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/functions/satisfies.js"(exports, module2) {
var Range = require_range();
var satisfies = (version2, range, options) => {
try {
@@ -2878,18 +2898,18 @@ var require_satisfies = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/to-comparators.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/to-comparators.js
var require_to_comparators = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/to-comparators.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/to-comparators.js"(exports, module2) {
var Range = require_range();
var toComparators = (range, options) => new Range(range, options).set.map((comp) => comp.map((c) => c.value).join(" ").trim().split(" "));
module2.exports = toComparators;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/max-satisfying.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/max-satisfying.js
var require_max_satisfying = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/max-satisfying.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/max-satisfying.js"(exports, module2) {
var SemVer = require_semver();
var Range = require_range();
var maxSatisfying = (versions, range, options) => {
@@ -2915,9 +2935,9 @@ var require_max_satisfying = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/min-satisfying.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/min-satisfying.js
var require_min_satisfying = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/min-satisfying.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/min-satisfying.js"(exports, module2) {
var SemVer = require_semver();
var Range = require_range();
var minSatisfying = (versions, range, options) => {
@@ -2943,9 +2963,9 @@ var require_min_satisfying = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/min-version.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/min-version.js
var require_min_version = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/min-version.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/min-version.js"(exports, module2) {
var SemVer = require_semver();
var Range = require_range();
var gt = require_gt();
@@ -2999,9 +3019,9 @@ var require_min_version = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/valid.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/valid.js
var require_valid2 = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/valid.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/valid.js"(exports, module2) {
var Range = require_range();
var validRange = (range, options) => {
try {
@@ -3014,9 +3034,9 @@ var require_valid2 = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/outside.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/outside.js
var require_outside = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/outside.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/outside.js"(exports, module2) {
var SemVer = require_semver();
var Comparator = require_comparator();
var { ANY } = Comparator;
@@ -3082,27 +3102,27 @@ var require_outside = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/gtr.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/gtr.js
var require_gtr = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/gtr.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/gtr.js"(exports, module2) {
var outside = require_outside();
var gtr = (version2, range, options) => outside(version2, range, ">", options);
module2.exports = gtr;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/ltr.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/ltr.js
var require_ltr = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/ltr.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/ltr.js"(exports, module2) {
var outside = require_outside();
var ltr = (version2, range, options) => outside(version2, range, "<", options);
module2.exports = ltr;
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/intersects.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/intersects.js
var require_intersects = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/intersects.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/intersects.js"(exports, module2) {
var Range = require_range();
var intersects = (r1, r2, options) => {
r1 = new Range(r1, options);
@@ -3113,9 +3133,9 @@ var require_intersects = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/simplify.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/simplify.js
var require_simplify = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/simplify.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/simplify.js"(exports, module2) {
var satisfies = require_satisfies();
var compare = require_compare();
module2.exports = (versions, range, options) => {
@@ -3162,9 +3182,9 @@ var require_simplify = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/subset.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/subset.js
var require_subset = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/ranges/subset.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/ranges/subset.js"(exports, module2) {
var Range = require_range();
var Comparator = require_comparator();
var { ANY } = Comparator;
@@ -3324,9 +3344,9 @@ var require_subset = __commonJS({
}
});
-// .yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/index.js
+// .yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/index.js
var require_semver2 = __commonJS({
- ".yarn/cache/semver-npm-7.5.1-0736382fb9-20fce78943.zip/node_modules/semver/index.js"(exports, module2) {
+ ".yarn/cache/semver-npm-7.5.3-275095dbf3-9e949f7d57.zip/node_modules/semver/index.js"(exports, module2) {
var internalRe = require_re();
var constants = require_constants();
var SemVer = require_semver();
@@ -5203,9 +5223,9 @@ var require_lru_cache2 = __commonJS({
}
});
-// .yarn/cache/agent-base-npm-7.0.2-13f6445b9c-a2971dc644.zip/node_modules/agent-base/dist/helpers.js
+// .yarn/cache/agent-base-npm-7.1.0-4b12ba5111-d1c9dc1b33.zip/node_modules/agent-base/dist/helpers.js
var require_helpers = __commonJS({
- ".yarn/cache/agent-base-npm-7.0.2-13f6445b9c-a2971dc644.zip/node_modules/agent-base/dist/helpers.js"(exports) {
+ ".yarn/cache/agent-base-npm-7.1.0-4b12ba5111-d1c9dc1b33.zip/node_modules/agent-base/dist/helpers.js"(exports) {
"use strict";
var __createBinding2 = exports && exports.__createBinding || (Object.create ? function(o, m, k, k2) {
if (k2 === void 0)
@@ -5278,9 +5298,9 @@ var require_helpers = __commonJS({
}
});
-// .yarn/cache/agent-base-npm-7.0.2-13f6445b9c-a2971dc644.zip/node_modules/agent-base/dist/index.js
+// .yarn/cache/agent-base-npm-7.1.0-4b12ba5111-d1c9dc1b33.zip/node_modules/agent-base/dist/index.js
var require_dist = __commonJS({
- ".yarn/cache/agent-base-npm-7.0.2-13f6445b9c-a2971dc644.zip/node_modules/agent-base/dist/index.js"(exports) {
+ ".yarn/cache/agent-base-npm-7.1.0-4b12ba5111-d1c9dc1b33.zip/node_modules/agent-base/dist/index.js"(exports) {
"use strict";
var __createBinding2 = exports && exports.__createBinding || (Object.create ? function(o, m, k, k2) {
if (k2 === void 0)
@@ -5323,27 +5343,34 @@ var require_dist = __commonJS({
exports.Agent = void 0;
var http = __importStar2(require("http"));
__exportStar2(require_helpers(), exports);
- function isSecureEndpoint() {
- const { stack } = new Error();
- if (typeof stack !== "string")
- return false;
- return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1);
- }
var INTERNAL = Symbol("AgentBaseInternalState");
var Agent = class extends http.Agent {
constructor(opts) {
super(opts);
this[INTERNAL] = {};
}
- createSocket(req, options, cb) {
- let secureEndpoint = typeof options.secureEndpoint === "boolean" ? options.secureEndpoint : void 0;
- if (typeof secureEndpoint === "undefined" && typeof options.protocol === "string") {
- secureEndpoint = options.protocol === "https:";
- }
- if (typeof secureEndpoint === "undefined") {
- secureEndpoint = isSecureEndpoint();
+ /**
+ * Determine whether this is an `http` or `https` request.
+ */
+ isSecureEndpoint(options) {
+ if (options) {
+ if (typeof options.secureEndpoint === "boolean") {
+ return options.secureEndpoint;
+ }
+ if (typeof options.protocol === "string") {
+ return options.protocol === "https:";
+ }
}
- const connectOpts = { ...options, secureEndpoint };
+ const { stack } = new Error();
+ if (typeof stack !== "string")
+ return false;
+ return stack.split("\n").some((l) => l.indexOf("(https.js:") !== -1 || l.indexOf("node:https:") !== -1);
+ }
+ createSocket(req, options, cb) {
+ const connectOpts = {
+ ...options,
+ secureEndpoint: this.isSecureEndpoint(options)
+ };
Promise.resolve().then(() => this.connect(req, connectOpts)).then((socket) => {
if (socket instanceof http.Agent) {
return socket.addRequest(req, connectOpts);
@@ -5369,7 +5396,7 @@ var require_dist = __commonJS({
}
}
get protocol() {
- return this[INTERNAL].protocol ?? (isSecureEndpoint() ? "https:" : "http:");
+ return this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? "https:" : "http:");
}
set protocol(v) {
if (this[INTERNAL]) {
@@ -5968,9 +5995,9 @@ var require_proxy_from_env = __commonJS({
}
});
-// .yarn/cache/http-proxy-agent-npm-6.1.0-cac4082d01-3d220db021.zip/node_modules/http-proxy-agent/dist/index.js
+// .yarn/cache/http-proxy-agent-npm-7.0.0-106a57cc8c-a028878555.zip/node_modules/http-proxy-agent/dist/index.js
var require_dist2 = __commonJS({
- ".yarn/cache/http-proxy-agent-npm-6.1.0-cac4082d01-3d220db021.zip/node_modules/http-proxy-agent/dist/index.js"(exports) {
+ ".yarn/cache/http-proxy-agent-npm-7.0.0-106a57cc8c-a028878555.zip/node_modules/http-proxy-agent/dist/index.js"(exports) {
"use strict";
var __createBinding2 = exports && exports.__createBinding || (Object.create ? function(o, m, k, k2) {
if (k2 === void 0)
@@ -6015,27 +6042,26 @@ var require_dist2 = __commonJS({
var events_1 = require("events");
var agent_base_1 = require_dist();
var debug2 = (0, debug_1.default)("http-proxy-agent");
- function isHTTPS(protocol) {
- return typeof protocol === "string" ? /^https:?$/i.test(protocol) : false;
- }
var HttpProxyAgent = class extends agent_base_1.Agent {
- get secureProxy() {
- return isHTTPS(this.proxy.protocol);
- }
constructor(proxy, opts) {
super(opts);
this.proxy = typeof proxy === "string" ? new URL(proxy) : proxy;
this.proxyHeaders = opts?.headers ?? {};
debug2("Creating new HttpProxyAgent instance: %o", this.proxy.href);
const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, "");
- const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.secureProxy ? 443 : 80;
+ const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80;
this.connectOpts = {
...opts ? omit(opts, "headers") : null,
host,
port
};
}
- async connect(req, opts) {
+ addRequest(req, opts) {
+ req._header = null;
+ this.setRequestProps(req, opts);
+ super.addRequest(req, opts);
+ }
+ setRequestProps(req, opts) {
const { proxy } = this;
const protocol = opts.secureEndpoint ? "https:" : "http:";
const hostname = req.getHeader("host") || "localhost";
@@ -6045,7 +6071,6 @@ var require_dist2 = __commonJS({
url.port = String(opts.port);
}
req.path = String(url);
- req._header = null;
const headers = typeof this.proxyHeaders === "function" ? this.proxyHeaders() : { ...this.proxyHeaders };
if (proxy.username || proxy.password) {
const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`;
@@ -6060,13 +6085,11 @@ var require_dist2 = __commonJS({
req.setHeader(name, value);
}
}
- let socket;
- if (this.secureProxy) {
- debug2("Creating `tls.Socket`: %o", this.connectOpts);
- socket = tls.connect(this.connectOpts);
- } else {
- debug2("Creating `net.Socket`: %o", this.connectOpts);
- socket = net.connect(this.connectOpts);
+ }
+ async connect(req, opts) {
+ req._header = null;
+ if (!req.path.includes("://")) {
+ this.setRequestProps(req, opts);
}
let first;
let endOfHeaders;
@@ -6079,6 +6102,14 @@ var require_dist2 = __commonJS({
req.outputData[0].data = req._header + first.substring(endOfHeaders);
debug2("Output buffer: %o", req.outputData[0].data);
}
+ let socket;
+ if (this.proxy.protocol === "https:") {
+ debug2("Creating `tls.Socket`: %o", this.connectOpts);
+ socket = tls.connect(this.connectOpts);
+ } else {
+ debug2("Creating `net.Socket`: %o", this.connectOpts);
+ socket = net.connect(this.connectOpts);
+ }
await (0, events_1.once)(socket, "connect");
return socket;
}
@@ -6098,9 +6129,9 @@ var require_dist2 = __commonJS({
}
});
-// .yarn/cache/https-proxy-agent-npm-6.2.0-0406eb3743-9a7617e512.zip/node_modules/https-proxy-agent/dist/parse-proxy-response.js
+// .yarn/cache/https-proxy-agent-npm-7.0.1-a2d5d93ee0-4fc3e7f50c.zip/node_modules/https-proxy-agent/dist/parse-proxy-response.js
var require_parse_proxy_response = __commonJS({
- ".yarn/cache/https-proxy-agent-npm-6.2.0-0406eb3743-9a7617e512.zip/node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports) {
+ ".yarn/cache/https-proxy-agent-npm-7.0.1-a2d5d93ee0-4fc3e7f50c.zip/node_modules/https-proxy-agent/dist/parse-proxy-response.js"(exports) {
"use strict";
var __importDefault2 = exports && exports.__importDefault || function(mod) {
return mod && mod.__esModule ? mod : { "default": mod };
@@ -6123,14 +6154,12 @@ var require_parse_proxy_response = __commonJS({
function cleanup() {
socket.removeListener("end", onend);
socket.removeListener("error", onerror);
- socket.removeListener("close", onclose);
socket.removeListener("readable", read);
}
- function onclose(err) {
- debug2("onclose had error %o", err);
- }
function onend() {
+ cleanup();
debug2("onend");
+ reject(new Error("Proxy connection ended before receiving CONNECT response"));
}
function onerror(err) {
cleanup();
@@ -6147,10 +6176,11 @@ var require_parse_proxy_response = __commonJS({
read();
return;
}
- const headerParts = buffered.toString("ascii").split("\r\n");
+ const headerParts = buffered.slice(0, endOfHeaders).toString("ascii").split("\r\n");
const firstLine = headerParts.shift();
if (!firstLine) {
- throw new Error("No header received");
+ socket.destroy();
+ return reject(new Error("No header received from proxy CONNECT response"));
}
const firstLineParts = firstLine.split(" ");
const statusCode = +firstLineParts[1];
@@ -6161,7 +6191,8 @@ var require_parse_proxy_response = __commonJS({
continue;
const firstColon = header.indexOf(":");
if (firstColon === -1) {
- throw new Error(`Invalid header: "${header}"`);
+ socket.destroy();
+ return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`));
}
const key = header.slice(0, firstColon).toLowerCase();
const value = header.slice(firstColon + 1).trimStart();
@@ -6174,7 +6205,7 @@ var require_parse_proxy_response = __commonJS({
headers[key] = value;
}
}
- debug2("got proxy server response: %o", firstLine);
+ debug2("got proxy server response: %o %o", firstLine, headers);
cleanup();
resolve({
connect: {
@@ -6186,7 +6217,6 @@ var require_parse_proxy_response = __commonJS({
});
}
socket.on("error", onerror);
- socket.on("close", onclose);
socket.on("end", onend);
read();
});
@@ -6195,9 +6225,9 @@ var require_parse_proxy_response = __commonJS({
}
});
-// .yarn/cache/https-proxy-agent-npm-6.2.0-0406eb3743-9a7617e512.zip/node_modules/https-proxy-agent/dist/index.js
+// .yarn/cache/https-proxy-agent-npm-7.0.1-a2d5d93ee0-4fc3e7f50c.zip/node_modules/https-proxy-agent/dist/index.js
var require_dist3 = __commonJS({
- ".yarn/cache/https-proxy-agent-npm-6.2.0-0406eb3743-9a7617e512.zip/node_modules/https-proxy-agent/dist/index.js"(exports) {
+ ".yarn/cache/https-proxy-agent-npm-7.0.1-a2d5d93ee0-4fc3e7f50c.zip/node_modules/https-proxy-agent/dist/index.js"(exports) {
"use strict";
var __createBinding2 = exports && exports.__createBinding || (Object.create ? function(o, m, k, k2) {
if (k2 === void 0)
@@ -6244,9 +6274,6 @@ var require_dist3 = __commonJS({
var parse_proxy_response_1 = require_parse_proxy_response();
var debug2 = (0, debug_1.default)("https-proxy-agent");
var HttpsProxyAgent = class extends agent_base_1.Agent {
- get secureProxy() {
- return isHTTPS(this.proxy.protocol);
- }
constructor(proxy, opts) {
super(opts);
this.options = { path: void 0 };
@@ -6254,7 +6281,7 @@ var require_dist3 = __commonJS({
this.proxyHeaders = opts?.headers ?? {};
debug2("Creating new HttpsProxyAgent instance: %o", this.proxy.href);
const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, "");
- const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.secureProxy ? 443 : 80;
+ const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === "https:" ? 443 : 80;
this.connectOpts = {
// Attempt to negotiate http/1.1 for proxy servers that support http/2
ALPNProtocols: ["http/1.1"],
@@ -6268,12 +6295,12 @@ var require_dist3 = __commonJS({
* new HTTP request.
*/
async connect(req, opts) {
- const { proxy, secureProxy } = this;
+ const { proxy } = this;
if (!opts.host) {
throw new TypeError('No "host" provided');
}
let socket;
- if (secureProxy) {
+ if (proxy.protocol === "https:") {
debug2("Creating `tls.Socket`: %o", this.connectOpts);
socket = tls.connect(this.connectOpts);
} else {
@@ -6332,9 +6359,6 @@ var require_dist3 = __commonJS({
function resume(socket) {
socket.resume();
}
- function isHTTPS(protocol) {
- return typeof protocol === "string" ? /^https:?$/i.test(protocol) : false;
- }
function omit(obj, ...keys) {
const ret = {};
let key;
@@ -6353,7 +6377,7 @@ var require_ip = __commonJS({
".yarn/cache/ip-npm-2.0.0-204facb3cc-42a7cf251b.zip/node_modules/ip/lib/ip.js"(exports) {
var ip = exports;
var { Buffer: Buffer2 } = require("buffer");
- var os2 = require("os");
+ var os3 = require("os");
ip.toBuffer = function(ip2, buff, offset) {
offset = ~~offset;
let result;
@@ -6610,7 +6634,7 @@ var require_ip = __commonJS({
return family === "ipv4" ? "127.0.0.1" : "fe80::1";
};
ip.address = function(name, family) {
- const interfaces = os2.networkInterfaces();
+ const interfaces = os3.networkInterfaces();
family = _normalizeFamily(family);
if (name && name !== "private" && name !== "public") {
const res = interfaces[name].filter((details) => {
@@ -9203,56 +9227,56 @@ var require_polyfills = __commonJS({
}
var chdir;
module2.exports = patch;
- function patch(fs6) {
+ function patch(fs8) {
if (constants.hasOwnProperty("O_SYMLINK") && process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) {
- patchLchmod(fs6);
- }
- if (!fs6.lutimes) {
- patchLutimes(fs6);
- }
- fs6.chown = chownFix(fs6.chown);
- fs6.fchown = chownFix(fs6.fchown);
- fs6.lchown = chownFix(fs6.lchown);
- fs6.chmod = chmodFix(fs6.chmod);
- fs6.fchmod = chmodFix(fs6.fchmod);
- fs6.lchmod = chmodFix(fs6.lchmod);
- fs6.chownSync = chownFixSync(fs6.chownSync);
- fs6.fchownSync = chownFixSync(fs6.fchownSync);
- fs6.lchownSync = chownFixSync(fs6.lchownSync);
- fs6.chmodSync = chmodFixSync(fs6.chmodSync);
- fs6.fchmodSync = chmodFixSync(fs6.fchmodSync);
- fs6.lchmodSync = chmodFixSync(fs6.lchmodSync);
- fs6.stat = statFix(fs6.stat);
- fs6.fstat = statFix(fs6.fstat);
- fs6.lstat = statFix(fs6.lstat);
- fs6.statSync = statFixSync(fs6.statSync);
- fs6.fstatSync = statFixSync(fs6.fstatSync);
- fs6.lstatSync = statFixSync(fs6.lstatSync);
- if (fs6.chmod && !fs6.lchmod) {
- fs6.lchmod = function(path8, mode, cb) {
+ patchLchmod(fs8);
+ }
+ if (!fs8.lutimes) {
+ patchLutimes(fs8);
+ }
+ fs8.chown = chownFix(fs8.chown);
+ fs8.fchown = chownFix(fs8.fchown);
+ fs8.lchown = chownFix(fs8.lchown);
+ fs8.chmod = chmodFix(fs8.chmod);
+ fs8.fchmod = chmodFix(fs8.fchmod);
+ fs8.lchmod = chmodFix(fs8.lchmod);
+ fs8.chownSync = chownFixSync(fs8.chownSync);
+ fs8.fchownSync = chownFixSync(fs8.fchownSync);
+ fs8.lchownSync = chownFixSync(fs8.lchownSync);
+ fs8.chmodSync = chmodFixSync(fs8.chmodSync);
+ fs8.fchmodSync = chmodFixSync(fs8.fchmodSync);
+ fs8.lchmodSync = chmodFixSync(fs8.lchmodSync);
+ fs8.stat = statFix(fs8.stat);
+ fs8.fstat = statFix(fs8.fstat);
+ fs8.lstat = statFix(fs8.lstat);
+ fs8.statSync = statFixSync(fs8.statSync);
+ fs8.fstatSync = statFixSync(fs8.fstatSync);
+ fs8.lstatSync = statFixSync(fs8.lstatSync);
+ if (fs8.chmod && !fs8.lchmod) {
+ fs8.lchmod = function(path10, mode, cb) {
if (cb)
process.nextTick(cb);
};
- fs6.lchmodSync = function() {
+ fs8.lchmodSync = function() {
};
}
- if (fs6.chown && !fs6.lchown) {
- fs6.lchown = function(path8, uid, gid, cb) {
+ if (fs8.chown && !fs8.lchown) {
+ fs8.lchown = function(path10, uid, gid, cb) {
if (cb)
process.nextTick(cb);
};
- fs6.lchownSync = function() {
+ fs8.lchownSync = function() {
};
}
if (platform === "win32") {
- fs6.rename = typeof fs6.rename !== "function" ? fs6.rename : function(fs$rename) {
+ fs8.rename = typeof fs8.rename !== "function" ? fs8.rename : function(fs$rename) {
function rename(from, to, cb) {
var start = Date.now();
var backoff = 0;
fs$rename(from, to, function CB(er) {
if (er && (er.code === "EACCES" || er.code === "EPERM" || er.code === "EBUSY") && Date.now() - start < 6e4) {
setTimeout(function() {
- fs6.stat(to, function(stater, st) {
+ fs8.stat(to, function(stater, st) {
if (stater && stater.code === "ENOENT")
fs$rename(from, to, CB);
else
@@ -9270,9 +9294,9 @@ var require_polyfills = __commonJS({
if (Object.setPrototypeOf)
Object.setPrototypeOf(rename, fs$rename);
return rename;
- }(fs6.rename);
+ }(fs8.rename);
}
- fs6.read = typeof fs6.read !== "function" ? fs6.read : function(fs$read) {
+ fs8.read = typeof fs8.read !== "function" ? fs8.read : function(fs$read) {
function read(fd, buffer, offset, length, position, callback_) {
var callback;
if (callback_ && typeof callback_ === "function") {
@@ -9280,23 +9304,23 @@ var require_polyfills = __commonJS({
callback = function(er, _, __) {
if (er && er.code === "EAGAIN" && eagCounter < 10) {
eagCounter++;
- return fs$read.call(fs6, fd, buffer, offset, length, position, callback);
+ return fs$read.call(fs8, fd, buffer, offset, length, position, callback);
}
callback_.apply(this, arguments);
};
}
- return fs$read.call(fs6, fd, buffer, offset, length, position, callback);
+ return fs$read.call(fs8, fd, buffer, offset, length, position, callback);
}
if (Object.setPrototypeOf)
Object.setPrototypeOf(read, fs$read);
return read;
- }(fs6.read);
- fs6.readSync = typeof fs6.readSync !== "function" ? fs6.readSync : function(fs$readSync) {
+ }(fs8.read);
+ fs8.readSync = typeof fs8.readSync !== "function" ? fs8.readSync : function(fs$readSync) {
return function(fd, buffer, offset, length, position) {
var eagCounter = 0;
while (true) {
try {
- return fs$readSync.call(fs6, fd, buffer, offset, length, position);
+ return fs$readSync.call(fs8, fd, buffer, offset, length, position);
} catch (er) {
if (er.code === "EAGAIN" && eagCounter < 10) {
eagCounter++;
@@ -9306,11 +9330,11 @@ var require_polyfills = __commonJS({
}
}
};
- }(fs6.readSync);
- function patchLchmod(fs7) {
- fs7.lchmod = function(path8, mode, callback) {
- fs7.open(
- path8,
+ }(fs8.readSync);
+ function patchLchmod(fs9) {
+ fs9.lchmod = function(path10, mode, callback) {
+ fs9.open(
+ path10,
constants.O_WRONLY | constants.O_SYMLINK,
mode,
function(err, fd) {
@@ -9319,8 +9343,8 @@ var require_polyfills = __commonJS({
callback(err);
return;
}
- fs7.fchmod(fd, mode, function(err2) {
- fs7.close(fd, function(err22) {
+ fs9.fchmod(fd, mode, function(err2) {
+ fs9.close(fd, function(err22) {
if (callback)
callback(err2 || err22);
});
@@ -9328,68 +9352,68 @@ var require_polyfills = __commonJS({
}
);
};
- fs7.lchmodSync = function(path8, mode) {
- var fd = fs7.openSync(path8, constants.O_WRONLY | constants.O_SYMLINK, mode);
+ fs9.lchmodSync = function(path10, mode) {
+ var fd = fs9.openSync(path10, constants.O_WRONLY | constants.O_SYMLINK, mode);
var threw = true;
var ret;
try {
- ret = fs7.fchmodSync(fd, mode);
+ ret = fs9.fchmodSync(fd, mode);
threw = false;
} finally {
if (threw) {
try {
- fs7.closeSync(fd);
+ fs9.closeSync(fd);
} catch (er) {
}
} else {
- fs7.closeSync(fd);
+ fs9.closeSync(fd);
}
}
return ret;
};
}
- function patchLutimes(fs7) {
- if (constants.hasOwnProperty("O_SYMLINK") && fs7.futimes) {
- fs7.lutimes = function(path8, at, mt, cb) {
- fs7.open(path8, constants.O_SYMLINK, function(er, fd) {
+ function patchLutimes(fs9) {
+ if (constants.hasOwnProperty("O_SYMLINK") && fs9.futimes) {
+ fs9.lutimes = function(path10, at, mt, cb) {
+ fs9.open(path10, constants.O_SYMLINK, function(er, fd) {
if (er) {
if (cb)
cb(er);
return;
}
- fs7.futimes(fd, at, mt, function(er2) {
- fs7.close(fd, function(er22) {
+ fs9.futimes(fd, at, mt, function(er2) {
+ fs9.close(fd, function(er22) {
if (cb)
cb(er2 || er22);
});
});
});
};
- fs7.lutimesSync = function(path8, at, mt) {
- var fd = fs7.openSync(path8, constants.O_SYMLINK);
+ fs9.lutimesSync = function(path10, at, mt) {
+ var fd = fs9.openSync(path10, constants.O_SYMLINK);
var ret;
var threw = true;
try {
- ret = fs7.futimesSync(fd, at, mt);
+ ret = fs9.futimesSync(fd, at, mt);
threw = false;
} finally {
if (threw) {
try {
- fs7.closeSync(fd);
+ fs9.closeSync(fd);
} catch (er) {
}
} else {
- fs7.closeSync(fd);
+ fs9.closeSync(fd);
}
}
return ret;
};
- } else if (fs7.futimes) {
- fs7.lutimes = function(_a, _b, _c, cb) {
+ } else if (fs9.futimes) {
+ fs9.lutimes = function(_a, _b, _c, cb) {
if (cb)
process.nextTick(cb);
};
- fs7.lutimesSync = function() {
+ fs9.lutimesSync = function() {
};
}
}
@@ -9397,7 +9421,7 @@ var require_polyfills = __commonJS({
if (!orig)
return orig;
return function(target, mode, cb) {
- return orig.call(fs6, target, mode, function(er) {
+ return orig.call(fs8, target, mode, function(er) {
if (chownErOk(er))
er = null;
if (cb)
@@ -9410,7 +9434,7 @@ var require_polyfills = __commonJS({
return orig;
return function(target, mode) {
try {
- return orig.call(fs6, target, mode);
+ return orig.call(fs8, target, mode);
} catch (er) {
if (!chownErOk(er))
throw er;
@@ -9421,7 +9445,7 @@ var require_polyfills = __commonJS({
if (!orig)
return orig;
return function(target, uid, gid, cb) {
- return orig.call(fs6, target, uid, gid, function(er) {
+ return orig.call(fs8, target, uid, gid, function(er) {
if (chownErOk(er))
er = null;
if (cb)
@@ -9434,7 +9458,7 @@ var require_polyfills = __commonJS({
return orig;
return function(target, uid, gid) {
try {
- return orig.call(fs6, target, uid, gid);
+ return orig.call(fs8, target, uid, gid);
} catch (er) {
if (!chownErOk(er))
throw er;
@@ -9459,14 +9483,14 @@ var require_polyfills = __commonJS({
if (cb)
cb.apply(this, arguments);
}
- return options ? orig.call(fs6, target, options, callback) : orig.call(fs6, target, callback);
+ return options ? orig.call(fs8, target, options, callback) : orig.call(fs8, target, callback);
};
}
function statFixSync(orig) {
if (!orig)
return orig;
return function(target, options) {
- var stats = options ? orig.call(fs6, target, options) : orig.call(fs6, target);
+ var stats = options ? orig.call(fs8, target, options) : orig.call(fs8, target);
if (stats) {
if (stats.uid < 0)
stats.uid += 4294967296;
@@ -9497,17 +9521,17 @@ var require_legacy_streams = __commonJS({
".yarn/cache/graceful-fs-npm-4.2.11-24bb648a68-0228fc1080.zip/node_modules/graceful-fs/legacy-streams.js"(exports, module2) {
var Stream = require("stream").Stream;
module2.exports = legacy;
- function legacy(fs6) {
+ function legacy(fs8) {
return {
ReadStream,
WriteStream
};
- function ReadStream(path8, options) {
+ function ReadStream(path10, options) {
if (!(this instanceof ReadStream))
- return new ReadStream(path8, options);
+ return new ReadStream(path10, options);
Stream.call(this);
var self2 = this;
- this.path = path8;
+ this.path = path10;
this.fd = null;
this.readable = true;
this.paused = false;
@@ -9542,7 +9566,7 @@ var require_legacy_streams = __commonJS({
});
return;
}
- fs6.open(this.path, this.flags, this.mode, function(err, fd) {
+ fs8.open(this.path, this.flags, this.mode, function(err, fd) {
if (err) {
self2.emit("error", err);
self2.readable = false;
@@ -9553,11 +9577,11 @@ var require_legacy_streams = __commonJS({
self2._read();
});
}
- function WriteStream(path8, options) {
+ function WriteStream(path10, options) {
if (!(this instanceof WriteStream))
- return new WriteStream(path8, options);
+ return new WriteStream(path10, options);
Stream.call(this);
- this.path = path8;
+ this.path = path10;
this.fd = null;
this.writable = true;
this.flags = "w";
@@ -9582,7 +9606,7 @@ var require_legacy_streams = __commonJS({
this.busy = false;
this._queue = [];
if (this.fd === null) {
- this._open = fs6.open;
+ this._open = fs8.open;
this._queue.push([this._open, this.path, this.flags, this.mode, void 0]);
this.flush();
}
@@ -9617,7 +9641,7 @@ var require_clone = __commonJS({
// .yarn/cache/graceful-fs-npm-4.2.11-24bb648a68-0228fc1080.zip/node_modules/graceful-fs/graceful-fs.js
var require_graceful_fs = __commonJS({
".yarn/cache/graceful-fs-npm-4.2.11-24bb648a68-0228fc1080.zip/node_modules/graceful-fs/graceful-fs.js"(exports, module2) {
- var fs6 = require("fs");
+ var fs8 = require("fs");
var polyfills = require_polyfills();
var legacy = require_legacy_streams();
var clone = require_clone();
@@ -9649,12 +9673,12 @@ var require_graceful_fs = __commonJS({
m = "GFS4: " + m.split(/\n/).join("\nGFS4: ");
console.error(m);
};
- if (!fs6[gracefulQueue]) {
+ if (!fs8[gracefulQueue]) {
queue = global[gracefulQueue] || [];
- publishQueue(fs6, queue);
- fs6.close = function(fs$close) {
+ publishQueue(fs8, queue);
+ fs8.close = function(fs$close) {
function close(fd, cb) {
- return fs$close.call(fs6, fd, function(err) {
+ return fs$close.call(fs8, fd, function(err) {
if (!err) {
resetQueue();
}
@@ -9666,48 +9690,48 @@ var require_graceful_fs = __commonJS({
value: fs$close
});
return close;
- }(fs6.close);
- fs6.closeSync = function(fs$closeSync) {
+ }(fs8.close);
+ fs8.closeSync = function(fs$closeSync) {
function closeSync(fd) {
- fs$closeSync.apply(fs6, arguments);
+ fs$closeSync.apply(fs8, arguments);
resetQueue();
}
Object.defineProperty(closeSync, previousSymbol, {
value: fs$closeSync
});
return closeSync;
- }(fs6.closeSync);
+ }(fs8.closeSync);
if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || "")) {
process.on("exit", function() {
- debug2(fs6[gracefulQueue]);
- require("assert").equal(fs6[gracefulQueue].length, 0);
+ debug2(fs8[gracefulQueue]);
+ require("assert").equal(fs8[gracefulQueue].length, 0);
});
}
}
var queue;
if (!global[gracefulQueue]) {
- publishQueue(global, fs6[gracefulQueue]);
- }
- module2.exports = patch(clone(fs6));
- if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs6.__patched) {
- module2.exports = patch(fs6);
- fs6.__patched = true;
- }
- function patch(fs7) {
- polyfills(fs7);
- fs7.gracefulify = patch;
- fs7.createReadStream = createReadStream;
- fs7.createWriteStream = createWriteStream;
- var fs$readFile = fs7.readFile;
- fs7.readFile = readFile;
- function readFile(path8, options, cb) {
+ publishQueue(global, fs8[gracefulQueue]);
+ }
+ module2.exports = patch(clone(fs8));
+ if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs8.__patched) {
+ module2.exports = patch(fs8);
+ fs8.__patched = true;
+ }
+ function patch(fs9) {
+ polyfills(fs9);
+ fs9.gracefulify = patch;
+ fs9.createReadStream = createReadStream;
+ fs9.createWriteStream = createWriteStream;
+ var fs$readFile = fs9.readFile;
+ fs9.readFile = readFile;
+ function readFile(path10, options, cb) {
if (typeof options === "function")
cb = options, options = null;
- return go$readFile(path8, options, cb);
- function go$readFile(path9, options2, cb2, startTime) {
- return fs$readFile(path9, options2, function(err) {
+ return go$readFile(path10, options, cb);
+ function go$readFile(path11, options2, cb2, startTime) {
+ return fs$readFile(path11, options2, function(err) {
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
- enqueue([go$readFile, [path9, options2, cb2], err, startTime || Date.now(), Date.now()]);
+ enqueue([go$readFile, [path11, options2, cb2], err, startTime || Date.now(), Date.now()]);
else {
if (typeof cb2 === "function")
cb2.apply(this, arguments);
@@ -9715,16 +9739,16 @@ var require_graceful_fs = __commonJS({
});
}
}
- var fs$writeFile = fs7.writeFile;
- fs7.writeFile = writeFile;
- function writeFile(path8, data, options, cb) {
+ var fs$writeFile = fs9.writeFile;
+ fs9.writeFile = writeFile;
+ function writeFile(path10, data, options, cb) {
if (typeof options === "function")
cb = options, options = null;
- return go$writeFile(path8, data, options, cb);
- function go$writeFile(path9, data2, options2, cb2, startTime) {
- return fs$writeFile(path9, data2, options2, function(err) {
+ return go$writeFile(path10, data, options, cb);
+ function go$writeFile(path11, data2, options2, cb2, startTime) {
+ return fs$writeFile(path11, data2, options2, function(err) {
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
- enqueue([go$writeFile, [path9, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
+ enqueue([go$writeFile, [path11, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
else {
if (typeof cb2 === "function")
cb2.apply(this, arguments);
@@ -9732,17 +9756,17 @@ var require_graceful_fs = __commonJS({
});
}
}
- var fs$appendFile = fs7.appendFile;
+ var fs$appendFile = fs9.appendFile;
if (fs$appendFile)
- fs7.appendFile = appendFile;
- function appendFile(path8, data, options, cb) {
+ fs9.appendFile = appendFile;
+ function appendFile(path10, data, options, cb) {
if (typeof options === "function")
cb = options, options = null;
- return go$appendFile(path8, data, options, cb);
- function go$appendFile(path9, data2, options2, cb2, startTime) {
- return fs$appendFile(path9, data2, options2, function(err) {
+ return go$appendFile(path10, data, options, cb);
+ function go$appendFile(path11, data2, options2, cb2, startTime) {
+ return fs$appendFile(path11, data2, options2, function(err) {
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
- enqueue([go$appendFile, [path9, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
+ enqueue([go$appendFile, [path11, data2, options2, cb2], err, startTime || Date.now(), Date.now()]);
else {
if (typeof cb2 === "function")
cb2.apply(this, arguments);
@@ -9750,9 +9774,9 @@ var require_graceful_fs = __commonJS({
});
}
}
- var fs$copyFile = fs7.copyFile;
+ var fs$copyFile = fs9.copyFile;
if (fs$copyFile)
- fs7.copyFile = copyFile;
+ fs9.copyFile = copyFile;
function copyFile(src, dest, flags, cb) {
if (typeof flags === "function") {
cb = flags;
@@ -9770,34 +9794,34 @@ var require_graceful_fs = __commonJS({
});
}
}
- var fs$readdir = fs7.readdir;
- fs7.readdir = readdir;
+ var fs$readdir = fs9.readdir;
+ fs9.readdir = readdir;
var noReaddirOptionVersions = /^v[0-5]\./;
- function readdir(path8, options, cb) {
+ function readdir(path10, options, cb) {
if (typeof options === "function")
cb = options, options = null;
- var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path9, options2, cb2, startTime) {
- return fs$readdir(path9, fs$readdirCallback(
- path9,
+ var go$readdir = noReaddirOptionVersions.test(process.version) ? function go$readdir2(path11, options2, cb2, startTime) {
+ return fs$readdir(path11, fs$readdirCallback(
+ path11,
options2,
cb2,
startTime
));
- } : function go$readdir2(path9, options2, cb2, startTime) {
- return fs$readdir(path9, options2, fs$readdirCallback(
- path9,
+ } : function go$readdir2(path11, options2, cb2, startTime) {
+ return fs$readdir(path11, options2, fs$readdirCallback(
+ path11,
options2,
cb2,
startTime
));
};
- return go$readdir(path8, options, cb);
- function fs$readdirCallback(path9, options2, cb2, startTime) {
+ return go$readdir(path10, options, cb);
+ function fs$readdirCallback(path11, options2, cb2, startTime) {
return function(err, files) {
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
enqueue([
go$readdir,
- [path9, options2, cb2],
+ [path11, options2, cb2],
err,
startTime || Date.now(),
Date.now()
@@ -9812,21 +9836,21 @@ var require_graceful_fs = __commonJS({
}
}
if (process.version.substr(0, 4) === "v0.8") {
- var legStreams = legacy(fs7);
+ var legStreams = legacy(fs9);
ReadStream = legStreams.ReadStream;
WriteStream = legStreams.WriteStream;
}
- var fs$ReadStream = fs7.ReadStream;
+ var fs$ReadStream = fs9.ReadStream;
if (fs$ReadStream) {
ReadStream.prototype = Object.create(fs$ReadStream.prototype);
ReadStream.prototype.open = ReadStream$open;
}
- var fs$WriteStream = fs7.WriteStream;
+ var fs$WriteStream = fs9.WriteStream;
if (fs$WriteStream) {
WriteStream.prototype = Object.create(fs$WriteStream.prototype);
WriteStream.prototype.open = WriteStream$open;
}
- Object.defineProperty(fs7, "ReadStream", {
+ Object.defineProperty(fs9, "ReadStream", {
get: function() {
return ReadStream;
},
@@ -9836,7 +9860,7 @@ var require_graceful_fs = __commonJS({
enumerable: true,
configurable: true
});
- Object.defineProperty(fs7, "WriteStream", {
+ Object.defineProperty(fs9, "WriteStream", {
get: function() {
return WriteStream;
},
@@ -9847,7 +9871,7 @@ var require_graceful_fs = __commonJS({
configurable: true
});
var FileReadStream = ReadStream;
- Object.defineProperty(fs7, "FileReadStream", {
+ Object.defineProperty(fs9, "FileReadStream", {
get: function() {
return FileReadStream;
},
@@ -9858,7 +9882,7 @@ var require_graceful_fs = __commonJS({
configurable: true
});
var FileWriteStream = WriteStream;
- Object.defineProperty(fs7, "FileWriteStream", {
+ Object.defineProperty(fs9, "FileWriteStream", {
get: function() {
return FileWriteStream;
},
@@ -9868,7 +9892,7 @@ var require_graceful_fs = __commonJS({
enumerable: true,
configurable: true
});
- function ReadStream(path8, options) {
+ function ReadStream(path10, options) {
if (this instanceof ReadStream)
return fs$ReadStream.apply(this, arguments), this;
else
@@ -9888,7 +9912,7 @@ var require_graceful_fs = __commonJS({
}
});
}
- function WriteStream(path8, options) {
+ function WriteStream(path10, options) {
if (this instanceof WriteStream)
return fs$WriteStream.apply(this, arguments), this;
else
@@ -9906,22 +9930,22 @@ var require_graceful_fs = __commonJS({
}
});
}
- function createReadStream(path8, options) {
- return new fs7.ReadStream(path8, options);
+ function createReadStream(path10, options) {
+ return new fs9.ReadStream(path10, options);
}
- function createWriteStream(path8, options) {
- return new fs7.WriteStream(path8, options);
+ function createWriteStream(path10, options) {
+ return new fs9.WriteStream(path10, options);
}
- var fs$open = fs7.open;
- fs7.open = open;
- function open(path8, flags, mode, cb) {
+ var fs$open = fs9.open;
+ fs9.open = open;
+ function open(path10, flags, mode, cb) {
if (typeof mode === "function")
cb = mode, mode = null;
- return go$open(path8, flags, mode, cb);
- function go$open(path9, flags2, mode2, cb2, startTime) {
- return fs$open(path9, flags2, mode2, function(err, fd) {
+ return go$open(path10, flags, mode, cb);
+ function go$open(path11, flags2, mode2, cb2, startTime) {
+ return fs$open(path11, flags2, mode2, function(err, fd) {
if (err && (err.code === "EMFILE" || err.code === "ENFILE"))
- enqueue([go$open, [path9, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]);
+ enqueue([go$open, [path11, flags2, mode2, cb2], err, startTime || Date.now(), Date.now()]);
else {
if (typeof cb2 === "function")
cb2.apply(this, arguments);
@@ -9929,20 +9953,20 @@ var require_graceful_fs = __commonJS({
});
}
}
- return fs7;
+ return fs9;
}
function enqueue(elem) {
debug2("ENQUEUE", elem[0].name, elem[1]);
- fs6[gracefulQueue].push(elem);
+ fs8[gracefulQueue].push(elem);
retry();
}
var retryTimer;
function resetQueue() {
var now = Date.now();
- for (var i = 0; i < fs6[gracefulQueue].length; ++i) {
- if (fs6[gracefulQueue][i].length > 2) {
- fs6[gracefulQueue][i][3] = now;
- fs6[gracefulQueue][i][4] = now;
+ for (var i = 0; i < fs8[gracefulQueue].length; ++i) {
+ if (fs8[gracefulQueue][i].length > 2) {
+ fs8[gracefulQueue][i][3] = now;
+ fs8[gracefulQueue][i][4] = now;
}
}
retry();
@@ -9950,9 +9974,9 @@ var require_graceful_fs = __commonJS({
function retry() {
clearTimeout(retryTimer);
retryTimer = void 0;
- if (fs6[gracefulQueue].length === 0)
+ if (fs8[gracefulQueue].length === 0)
return;
- var elem = fs6[gracefulQueue].shift();
+ var elem = fs8[gracefulQueue].shift();
var fn2 = elem[0];
var args = elem[1];
var err = elem[2];
@@ -9974,7 +9998,7 @@ var require_graceful_fs = __commonJS({
debug2("RETRY", fn2.name, args);
fn2.apply(null, args.concat([startTime]));
} else {
- fs6[gracefulQueue].push(elem);
+ fs8[gracefulQueue].push(elem);
}
}
if (retryTimer === void 0) {
@@ -9989,7 +10013,7 @@ var require_fs = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/fs/index.js"(exports) {
"use strict";
var u = require_universalify().fromCallback;
- var fs6 = require_graceful_fs();
+ var fs8 = require_graceful_fs();
var api = [
"access",
"appendFile",
@@ -10024,31 +10048,31 @@ var require_fs = __commonJS({
"utimes",
"writeFile"
].filter((key) => {
- return typeof fs6[key] === "function";
+ return typeof fs8[key] === "function";
});
- Object.keys(fs6).forEach((key) => {
+ Object.keys(fs8).forEach((key) => {
if (key === "promises") {
return;
}
- exports[key] = fs6[key];
+ exports[key] = fs8[key];
});
api.forEach((method) => {
- exports[method] = u(fs6[method]);
+ exports[method] = u(fs8[method]);
});
exports.exists = function(filename, callback) {
if (typeof callback === "function") {
- return fs6.exists(filename, callback);
+ return fs8.exists(filename, callback);
}
return new Promise((resolve) => {
- return fs6.exists(filename, resolve);
+ return fs8.exists(filename, resolve);
});
};
exports.read = function(fd, buffer, offset, length, position, callback) {
if (typeof callback === "function") {
- return fs6.read(fd, buffer, offset, length, position, callback);
+ return fs8.read(fd, buffer, offset, length, position, callback);
}
return new Promise((resolve, reject) => {
- fs6.read(fd, buffer, offset, length, position, (err, bytesRead, buffer2) => {
+ fs8.read(fd, buffer, offset, length, position, (err, bytesRead, buffer2) => {
if (err)
return reject(err);
resolve({ bytesRead, buffer: buffer2 });
@@ -10057,18 +10081,18 @@ var require_fs = __commonJS({
};
exports.write = function(fd, buffer, ...args) {
if (typeof args[args.length - 1] === "function") {
- return fs6.write(fd, buffer, ...args);
+ return fs8.write(fd, buffer, ...args);
}
return new Promise((resolve, reject) => {
- fs6.write(fd, buffer, ...args, (err, bytesWritten, buffer2) => {
+ fs8.write(fd, buffer, ...args, (err, bytesWritten, buffer2) => {
if (err)
return reject(err);
resolve({ bytesWritten, buffer: buffer2 });
});
});
};
- if (typeof fs6.realpath.native === "function") {
- exports.realpath.native = u(fs6.realpath.native);
+ if (typeof fs8.realpath.native === "function") {
+ exports.realpath.native = u(fs8.realpath.native);
}
}
});
@@ -10077,9 +10101,9 @@ var require_fs = __commonJS({
var require_win32 = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/mkdirs/win32.js"(exports, module2) {
"use strict";
- var path8 = require("path");
+ var path10 = require("path");
function getRootPath(p) {
- p = path8.normalize(path8.resolve(p)).split(path8.sep);
+ p = path10.normalize(path10.resolve(p)).split(path10.sep);
if (p.length > 0)
return p[0];
return null;
@@ -10101,8 +10125,8 @@ var require_win32 = __commonJS({
var require_mkdirs = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/mkdirs/mkdirs.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var invalidWin32Path = require_win32().invalidWin32Path;
var o777 = parseInt("0777", 8);
function mkdirs(p, opts, callback, made) {
@@ -10118,7 +10142,7 @@ var require_mkdirs = __commonJS({
return callback(errInval);
}
let mode = opts.mode;
- const xfs = opts.fs || fs6;
+ const xfs = opts.fs || fs8;
if (mode === void 0) {
mode = o777 & ~process.umask();
}
@@ -10126,7 +10150,7 @@ var require_mkdirs = __commonJS({
made = null;
callback = callback || function() {
};
- p = path8.resolve(p);
+ p = path10.resolve(p);
xfs.mkdir(p, mode, (er) => {
if (!er) {
made = made || p;
@@ -10134,9 +10158,9 @@ var require_mkdirs = __commonJS({
}
switch (er.code) {
case "ENOENT":
- if (path8.dirname(p) === p)
+ if (path10.dirname(p) === p)
return callback(er);
- mkdirs(path8.dirname(p), opts, (er2, made2) => {
+ mkdirs(path10.dirname(p), opts, (er2, made2) => {
if (er2)
callback(er2, made2);
else
@@ -10162,8 +10186,8 @@ var require_mkdirs = __commonJS({
var require_mkdirs_sync = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/mkdirs/mkdirs-sync.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var invalidWin32Path = require_win32().invalidWin32Path;
var o777 = parseInt("0777", 8);
function mkdirsSync(p, opts, made) {
@@ -10171,7 +10195,7 @@ var require_mkdirs_sync = __commonJS({
opts = { mode: opts };
}
let mode = opts.mode;
- const xfs = opts.fs || fs6;
+ const xfs = opts.fs || fs8;
if (process.platform === "win32" && invalidWin32Path(p)) {
const errInval = new Error(p + " contains invalid WIN32 path characters.");
errInval.code = "EINVAL";
@@ -10182,15 +10206,15 @@ var require_mkdirs_sync = __commonJS({
}
if (!made)
made = null;
- p = path8.resolve(p);
+ p = path10.resolve(p);
try {
xfs.mkdirSync(p, mode);
made = made || p;
} catch (err0) {
if (err0.code === "ENOENT") {
- if (path8.dirname(p) === p)
+ if (path10.dirname(p) === p)
throw err0;
- made = mkdirsSync(path8.dirname(p), opts, made);
+ made = mkdirsSync(path10.dirname(p), opts, made);
mkdirsSync(p, opts, made);
} else {
let stat;
@@ -10232,36 +10256,36 @@ var require_mkdirs2 = __commonJS({
var require_utimes = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/util/utimes.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var os2 = require("os");
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var os3 = require("os");
+ var path10 = require("path");
function hasMillisResSync() {
- let tmpfile = path8.join("millis-test-sync" + Date.now().toString() + Math.random().toString().slice(2));
- tmpfile = path8.join(os2.tmpdir(), tmpfile);
+ let tmpfile = path10.join("millis-test-sync" + Date.now().toString() + Math.random().toString().slice(2));
+ tmpfile = path10.join(os3.tmpdir(), tmpfile);
const d = /* @__PURE__ */ new Date(1435410243862);
- fs6.writeFileSync(tmpfile, "https://github.com/jprichardson/node-fs-extra/pull/141");
- const fd = fs6.openSync(tmpfile, "r+");
- fs6.futimesSync(fd, d, d);
- fs6.closeSync(fd);
- return fs6.statSync(tmpfile).mtime > 1435410243e3;
+ fs8.writeFileSync(tmpfile, "https://github.com/jprichardson/node-fs-extra/pull/141");
+ const fd = fs8.openSync(tmpfile, "r+");
+ fs8.futimesSync(fd, d, d);
+ fs8.closeSync(fd);
+ return fs8.statSync(tmpfile).mtime > 1435410243e3;
}
function hasMillisRes(callback) {
- let tmpfile = path8.join("millis-test" + Date.now().toString() + Math.random().toString().slice(2));
- tmpfile = path8.join(os2.tmpdir(), tmpfile);
+ let tmpfile = path10.join("millis-test" + Date.now().toString() + Math.random().toString().slice(2));
+ tmpfile = path10.join(os3.tmpdir(), tmpfile);
const d = /* @__PURE__ */ new Date(1435410243862);
- fs6.writeFile(tmpfile, "https://github.com/jprichardson/node-fs-extra/pull/141", (err) => {
+ fs8.writeFile(tmpfile, "https://github.com/jprichardson/node-fs-extra/pull/141", (err) => {
if (err)
return callback(err);
- fs6.open(tmpfile, "r+", (err2, fd) => {
+ fs8.open(tmpfile, "r+", (err2, fd) => {
if (err2)
return callback(err2);
- fs6.futimes(fd, d, d, (err3) => {
+ fs8.futimes(fd, d, d, (err3) => {
if (err3)
return callback(err3);
- fs6.close(fd, (err4) => {
+ fs8.close(fd, (err4) => {
if (err4)
return callback(err4);
- fs6.stat(tmpfile, (err5, stats) => {
+ fs8.stat(tmpfile, (err5, stats) => {
if (err5)
return callback(err5);
callback(null, stats.mtime > 1435410243e3);
@@ -10280,22 +10304,22 @@ var require_utimes = __commonJS({
throw new Error("fs-extra: timeRemoveMillis() unknown parameter type");
}
}
- function utimesMillis(path9, atime, mtime, callback) {
- fs6.open(path9, "r+", (err, fd) => {
+ function utimesMillis(path11, atime, mtime, callback) {
+ fs8.open(path11, "r+", (err, fd) => {
if (err)
return callback(err);
- fs6.futimes(fd, atime, mtime, (futimesErr) => {
- fs6.close(fd, (closeErr) => {
+ fs8.futimes(fd, atime, mtime, (futimesErr) => {
+ fs8.close(fd, (closeErr) => {
if (callback)
callback(futimesErr || closeErr);
});
});
});
}
- function utimesMillisSync(path9, atime, mtime) {
- const fd = fs6.openSync(path9, "r+");
- fs6.futimesSync(fd, atime, mtime);
- return fs6.closeSync(fd);
+ function utimesMillisSync(path11, atime, mtime) {
+ const fd = fs8.openSync(path11, "r+");
+ fs8.futimesSync(fd, atime, mtime);
+ return fs8.closeSync(fd);
}
module2.exports = {
hasMillisRes,
@@ -10311,8 +10335,8 @@ var require_utimes = __commonJS({
var require_stat = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/util/stat.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var NODE_VERSION_MAJOR_WITH_BIGINT = 10;
var NODE_VERSION_MINOR_WITH_BIGINT = 5;
var NODE_VERSION_PATCH_WITH_BIGINT = 0;
@@ -10336,10 +10360,10 @@ var require_stat = __commonJS({
}
function getStats(src, dest, cb) {
if (nodeSupportsBigInt()) {
- fs6.stat(src, { bigint: true }, (err, srcStat) => {
+ fs8.stat(src, { bigint: true }, (err, srcStat) => {
if (err)
return cb(err);
- fs6.stat(dest, { bigint: true }, (err2, destStat) => {
+ fs8.stat(dest, { bigint: true }, (err2, destStat) => {
if (err2) {
if (err2.code === "ENOENT")
return cb(null, { srcStat, destStat: null });
@@ -10349,10 +10373,10 @@ var require_stat = __commonJS({
});
});
} else {
- fs6.stat(src, (err, srcStat) => {
+ fs8.stat(src, (err, srcStat) => {
if (err)
return cb(err);
- fs6.stat(dest, (err2, destStat) => {
+ fs8.stat(dest, (err2, destStat) => {
if (err2) {
if (err2.code === "ENOENT")
return cb(null, { srcStat, destStat: null });
@@ -10366,15 +10390,15 @@ var require_stat = __commonJS({
function getStatsSync(src, dest) {
let srcStat, destStat;
if (nodeSupportsBigInt()) {
- srcStat = fs6.statSync(src, { bigint: true });
+ srcStat = fs8.statSync(src, { bigint: true });
} else {
- srcStat = fs6.statSync(src);
+ srcStat = fs8.statSync(src);
}
try {
if (nodeSupportsBigInt()) {
- destStat = fs6.statSync(dest, { bigint: true });
+ destStat = fs8.statSync(dest, { bigint: true });
} else {
- destStat = fs6.statSync(dest);
+ destStat = fs8.statSync(dest);
}
} catch (err) {
if (err.code === "ENOENT")
@@ -10408,12 +10432,12 @@ var require_stat = __commonJS({
return { srcStat, destStat };
}
function checkParentPaths(src, srcStat, dest, funcName, cb) {
- const srcParent = path8.resolve(path8.dirname(src));
- const destParent = path8.resolve(path8.dirname(dest));
- if (destParent === srcParent || destParent === path8.parse(destParent).root)
+ const srcParent = path10.resolve(path10.dirname(src));
+ const destParent = path10.resolve(path10.dirname(dest));
+ if (destParent === srcParent || destParent === path10.parse(destParent).root)
return cb();
if (nodeSupportsBigInt()) {
- fs6.stat(destParent, { bigint: true }, (err, destStat) => {
+ fs8.stat(destParent, { bigint: true }, (err, destStat) => {
if (err) {
if (err.code === "ENOENT")
return cb();
@@ -10425,7 +10449,7 @@ var require_stat = __commonJS({
return checkParentPaths(src, srcStat, destParent, funcName, cb);
});
} else {
- fs6.stat(destParent, (err, destStat) => {
+ fs8.stat(destParent, (err, destStat) => {
if (err) {
if (err.code === "ENOENT")
return cb();
@@ -10439,16 +10463,16 @@ var require_stat = __commonJS({
}
}
function checkParentPathsSync(src, srcStat, dest, funcName) {
- const srcParent = path8.resolve(path8.dirname(src));
- const destParent = path8.resolve(path8.dirname(dest));
- if (destParent === srcParent || destParent === path8.parse(destParent).root)
+ const srcParent = path10.resolve(path10.dirname(src));
+ const destParent = path10.resolve(path10.dirname(dest));
+ if (destParent === srcParent || destParent === path10.parse(destParent).root)
return;
let destStat;
try {
if (nodeSupportsBigInt()) {
- destStat = fs6.statSync(destParent, { bigint: true });
+ destStat = fs8.statSync(destParent, { bigint: true });
} else {
- destStat = fs6.statSync(destParent);
+ destStat = fs8.statSync(destParent);
}
} catch (err) {
if (err.code === "ENOENT")
@@ -10461,8 +10485,8 @@ var require_stat = __commonJS({
return checkParentPathsSync(src, srcStat, destParent, funcName);
}
function isSrcSubdir(src, dest) {
- const srcArr = path8.resolve(src).split(path8.sep).filter((i) => i);
- const destArr = path8.resolve(dest).split(path8.sep).filter((i) => i);
+ const srcArr = path10.resolve(src).split(path10.sep).filter((i) => i);
+ const destArr = path10.resolve(dest).split(path10.sep).filter((i) => i);
return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true);
}
function errMsg(src, dest, funcName) {
@@ -10499,8 +10523,8 @@ var require_buffer = __commonJS({
var require_copy_sync = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/copy-sync/copy-sync.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var mkdirpSync = require_mkdirs2().mkdirsSync;
var utimesSync = require_utimes().utimesMillisSync;
var stat = require_stat();
@@ -10523,8 +10547,8 @@ var require_copy_sync = __commonJS({
function handleFilterAndCopy(destStat, src, dest, opts) {
if (opts.filter && !opts.filter(src, dest))
return;
- const destParent = path8.dirname(dest);
- if (!fs6.existsSync(destParent))
+ const destParent = path10.dirname(dest);
+ if (!fs8.existsSync(destParent))
mkdirpSync(destParent);
return startCopy(destStat, src, dest, opts);
}
@@ -10534,7 +10558,7 @@ var require_copy_sync = __commonJS({
return getStats(destStat, src, dest, opts);
}
function getStats(destStat, src, dest, opts) {
- const statSync = opts.dereference ? fs6.statSync : fs6.lstatSync;
+ const statSync = opts.dereference ? fs8.statSync : fs8.lstatSync;
const srcStat = statSync(src);
if (srcStat.isDirectory())
return onDir(srcStat, destStat, src, dest, opts);
@@ -10550,16 +10574,16 @@ var require_copy_sync = __commonJS({
}
function mayCopyFile(srcStat, src, dest, opts) {
if (opts.overwrite) {
- fs6.unlinkSync(dest);
+ fs8.unlinkSync(dest);
return copyFile(srcStat, src, dest, opts);
} else if (opts.errorOnExist) {
throw new Error(`'${dest}' already exists`);
}
}
function copyFile(srcStat, src, dest, opts) {
- if (typeof fs6.copyFileSync === "function") {
- fs6.copyFileSync(src, dest);
- fs6.chmodSync(dest, srcStat.mode);
+ if (typeof fs8.copyFileSync === "function") {
+ fs8.copyFileSync(src, dest);
+ fs8.chmodSync(dest, srcStat.mode);
if (opts.preserveTimestamps) {
return utimesSync(dest, srcStat.atime, srcStat.mtime);
}
@@ -10570,18 +10594,18 @@ var require_copy_sync = __commonJS({
function copyFileFallback(srcStat, src, dest, opts) {
const BUF_LENGTH = 64 * 1024;
const _buff = require_buffer()(BUF_LENGTH);
- const fdr = fs6.openSync(src, "r");
- const fdw = fs6.openSync(dest, "w", srcStat.mode);
+ const fdr = fs8.openSync(src, "r");
+ const fdw = fs8.openSync(dest, "w", srcStat.mode);
let pos = 0;
while (pos < srcStat.size) {
- const bytesRead = fs6.readSync(fdr, _buff, 0, BUF_LENGTH, pos);
- fs6.writeSync(fdw, _buff, 0, bytesRead);
+ const bytesRead = fs8.readSync(fdr, _buff, 0, BUF_LENGTH, pos);
+ fs8.writeSync(fdw, _buff, 0, bytesRead);
pos += bytesRead;
}
if (opts.preserveTimestamps)
- fs6.futimesSync(fdw, srcStat.atime, srcStat.mtime);
- fs6.closeSync(fdr);
- fs6.closeSync(fdw);
+ fs8.futimesSync(fdw, srcStat.atime, srcStat.mtime);
+ fs8.closeSync(fdr);
+ fs8.closeSync(fdw);
}
function onDir(srcStat, destStat, src, dest, opts) {
if (!destStat)
@@ -10592,50 +10616,50 @@ var require_copy_sync = __commonJS({
return copyDir(src, dest, opts);
}
function mkDirAndCopy(srcStat, src, dest, opts) {
- fs6.mkdirSync(dest);
+ fs8.mkdirSync(dest);
copyDir(src, dest, opts);
- return fs6.chmodSync(dest, srcStat.mode);
+ return fs8.chmodSync(dest, srcStat.mode);
}
function copyDir(src, dest, opts) {
- fs6.readdirSync(src).forEach((item) => copyDirItem(item, src, dest, opts));
+ fs8.readdirSync(src).forEach((item) => copyDirItem(item, src, dest, opts));
}
function copyDirItem(item, src, dest, opts) {
- const srcItem = path8.join(src, item);
- const destItem = path8.join(dest, item);
+ const srcItem = path10.join(src, item);
+ const destItem = path10.join(dest, item);
const { destStat } = stat.checkPathsSync(srcItem, destItem, "copy");
return startCopy(destStat, srcItem, destItem, opts);
}
function onLink(destStat, src, dest, opts) {
- let resolvedSrc = fs6.readlinkSync(src);
+ let resolvedSrc = fs8.readlinkSync(src);
if (opts.dereference) {
- resolvedSrc = path8.resolve(process.cwd(), resolvedSrc);
+ resolvedSrc = path10.resolve(process.cwd(), resolvedSrc);
}
if (!destStat) {
- return fs6.symlinkSync(resolvedSrc, dest);
+ return fs8.symlinkSync(resolvedSrc, dest);
} else {
let resolvedDest;
try {
- resolvedDest = fs6.readlinkSync(dest);
+ resolvedDest = fs8.readlinkSync(dest);
} catch (err) {
if (err.code === "EINVAL" || err.code === "UNKNOWN")
- return fs6.symlinkSync(resolvedSrc, dest);
+ return fs8.symlinkSync(resolvedSrc, dest);
throw err;
}
if (opts.dereference) {
- resolvedDest = path8.resolve(process.cwd(), resolvedDest);
+ resolvedDest = path10.resolve(process.cwd(), resolvedDest);
}
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`);
}
- if (fs6.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
+ if (fs8.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`);
}
return copyLink(resolvedSrc, dest);
}
}
function copyLink(resolvedSrc, dest) {
- fs6.unlinkSync(dest);
- return fs6.symlinkSync(resolvedSrc, dest);
+ fs8.unlinkSync(dest);
+ return fs8.symlinkSync(resolvedSrc, dest);
}
module2.exports = copySync;
}
@@ -10656,13 +10680,13 @@ var require_path_exists = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/path-exists/index.js"(exports, module2) {
"use strict";
var u = require_universalify().fromPromise;
- var fs6 = require_fs();
- function pathExists(path8) {
- return fs6.access(path8).then(() => true).catch(() => false);
+ var fs8 = require_fs();
+ function pathExists(path10) {
+ return fs8.access(path10).then(() => true).catch(() => false);
}
module2.exports = {
pathExists: u(pathExists),
- pathExistsSync: fs6.existsSync
+ pathExistsSync: fs8.existsSync
};
}
});
@@ -10671,8 +10695,8 @@ var require_path_exists = __commonJS({
var require_copy = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/copy/copy.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var mkdirp = require_mkdirs2().mkdirs;
var pathExists = require_path_exists().pathExists;
var utimes = require_utimes().utimesMillis;
@@ -10708,7 +10732,7 @@ var require_copy = __commonJS({
});
}
function checkParentDir(destStat, src, dest, opts, cb) {
- const destParent = path8.dirname(dest);
+ const destParent = path10.dirname(dest);
pathExists(destParent, (err, dirExists) => {
if (err)
return cb(err);
@@ -10734,7 +10758,7 @@ var require_copy = __commonJS({
return getStats(destStat, src, dest, opts, cb);
}
function getStats(destStat, src, dest, opts, cb) {
- const stat2 = opts.dereference ? fs6.stat : fs6.lstat;
+ const stat2 = opts.dereference ? fs8.stat : fs8.lstat;
stat2(src, (err, srcStat) => {
if (err)
return cb(err);
@@ -10753,7 +10777,7 @@ var require_copy = __commonJS({
}
function mayCopyFile(srcStat, src, dest, opts, cb) {
if (opts.overwrite) {
- fs6.unlink(dest, (err) => {
+ fs8.unlink(dest, (err) => {
if (err)
return cb(err);
return copyFile(srcStat, src, dest, opts, cb);
@@ -10764,8 +10788,8 @@ var require_copy = __commonJS({
return cb();
}
function copyFile(srcStat, src, dest, opts, cb) {
- if (typeof fs6.copyFile === "function") {
- return fs6.copyFile(src, dest, (err) => {
+ if (typeof fs8.copyFile === "function") {
+ return fs8.copyFile(src, dest, (err) => {
if (err)
return cb(err);
return setDestModeAndTimestamps(srcStat, dest, opts, cb);
@@ -10774,14 +10798,14 @@ var require_copy = __commonJS({
return copyFileFallback(srcStat, src, dest, opts, cb);
}
function copyFileFallback(srcStat, src, dest, opts, cb) {
- const rs = fs6.createReadStream(src);
+ const rs = fs8.createReadStream(src);
rs.on("error", (err) => cb(err)).once("open", () => {
- const ws = fs6.createWriteStream(dest, { mode: srcStat.mode });
+ const ws = fs8.createWriteStream(dest, { mode: srcStat.mode });
ws.on("error", (err) => cb(err)).on("open", () => rs.pipe(ws)).once("close", () => setDestModeAndTimestamps(srcStat, dest, opts, cb));
});
}
function setDestModeAndTimestamps(srcStat, dest, opts, cb) {
- fs6.chmod(dest, srcStat.mode, (err) => {
+ fs8.chmod(dest, srcStat.mode, (err) => {
if (err)
return cb(err);
if (opts.preserveTimestamps) {
@@ -10799,18 +10823,18 @@ var require_copy = __commonJS({
return copyDir(src, dest, opts, cb);
}
function mkDirAndCopy(srcStat, src, dest, opts, cb) {
- fs6.mkdir(dest, (err) => {
+ fs8.mkdir(dest, (err) => {
if (err)
return cb(err);
copyDir(src, dest, opts, (err2) => {
if (err2)
return cb(err2);
- return fs6.chmod(dest, srcStat.mode, cb);
+ return fs8.chmod(dest, srcStat.mode, cb);
});
});
}
function copyDir(src, dest, opts, cb) {
- fs6.readdir(src, (err, items) => {
+ fs8.readdir(src, (err, items) => {
if (err)
return cb(err);
return copyDirItems(items, src, dest, opts, cb);
@@ -10823,8 +10847,8 @@ var require_copy = __commonJS({
return copyDirItem(items, item, src, dest, opts, cb);
}
function copyDirItem(items, item, src, dest, opts, cb) {
- const srcItem = path8.join(src, item);
- const destItem = path8.join(dest, item);
+ const srcItem = path10.join(src, item);
+ const destItem = path10.join(dest, item);
stat.checkPaths(srcItem, destItem, "copy", (err, stats) => {
if (err)
return cb(err);
@@ -10837,23 +10861,23 @@ var require_copy = __commonJS({
});
}
function onLink(destStat, src, dest, opts, cb) {
- fs6.readlink(src, (err, resolvedSrc) => {
+ fs8.readlink(src, (err, resolvedSrc) => {
if (err)
return cb(err);
if (opts.dereference) {
- resolvedSrc = path8.resolve(process.cwd(), resolvedSrc);
+ resolvedSrc = path10.resolve(process.cwd(), resolvedSrc);
}
if (!destStat) {
- return fs6.symlink(resolvedSrc, dest, cb);
+ return fs8.symlink(resolvedSrc, dest, cb);
} else {
- fs6.readlink(dest, (err2, resolvedDest) => {
+ fs8.readlink(dest, (err2, resolvedDest) => {
if (err2) {
if (err2.code === "EINVAL" || err2.code === "UNKNOWN")
- return fs6.symlink(resolvedSrc, dest, cb);
+ return fs8.symlink(resolvedSrc, dest, cb);
return cb(err2);
}
if (opts.dereference) {
- resolvedDest = path8.resolve(process.cwd(), resolvedDest);
+ resolvedDest = path10.resolve(process.cwd(), resolvedDest);
}
if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) {
return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`));
@@ -10867,10 +10891,10 @@ var require_copy = __commonJS({
});
}
function copyLink(resolvedSrc, dest, cb) {
- fs6.unlink(dest, (err) => {
+ fs8.unlink(dest, (err) => {
if (err)
return cb(err);
- return fs6.symlink(resolvedSrc, dest, cb);
+ return fs8.symlink(resolvedSrc, dest, cb);
});
}
module2.exports = copy;
@@ -10892,8 +10916,8 @@ var require_copy2 = __commonJS({
var require_rimraf = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/remove/rimraf.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var assert2 = require("assert");
var isWindows = process.platform === "win32";
function defaults(options) {
@@ -10906,9 +10930,9 @@ var require_rimraf = __commonJS({
"readdir"
];
methods.forEach((m) => {
- options[m] = options[m] || fs6[m];
+ options[m] = options[m] || fs8[m];
m = m + "Sync";
- options[m] = options[m] || fs6[m];
+ options[m] = options[m] || fs8[m];
});
options.maxBusyTries = options.maxBusyTries || 3;
}
@@ -11050,7 +11074,7 @@ var require_rimraf = __commonJS({
if (n === 0)
return options.rmdir(p, cb);
files.forEach((f) => {
- rimraf2(path8.join(p, f), options, (er2) => {
+ rimraf2(path10.join(p, f), options, (er2) => {
if (errState) {
return;
}
@@ -11119,7 +11143,7 @@ var require_rimraf = __commonJS({
function rmkidsSync(p, options) {
assert2(p);
assert2(options);
- options.readdirSync(p).forEach((f) => rimrafSync(path8.join(p, f), options));
+ options.readdirSync(p).forEach((f) => rimrafSync(path10.join(p, f), options));
if (isWindows) {
const startTime = Date.now();
do {
@@ -11157,17 +11181,17 @@ var require_empty = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/empty/index.js"(exports, module2) {
"use strict";
var u = require_universalify().fromCallback;
- var fs6 = require_graceful_fs();
- var path8 = require("path");
- var mkdir3 = require_mkdirs2();
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
+ var mkdir4 = require_mkdirs2();
var remove = require_remove();
var emptyDir = u(function emptyDir2(dir, callback) {
callback = callback || function() {
};
- fs6.readdir(dir, (err, items) => {
+ fs8.readdir(dir, (err, items) => {
if (err)
- return mkdir3.mkdirs(dir, callback);
- items = items.map((item) => path8.join(dir, item));
+ return mkdir4.mkdirs(dir, callback);
+ items = items.map((item) => path10.join(dir, item));
deleteItem();
function deleteItem() {
const item = items.pop();
@@ -11184,12 +11208,12 @@ var require_empty = __commonJS({
function emptyDirSync(dir) {
let items;
try {
- items = fs6.readdirSync(dir);
+ items = fs8.readdirSync(dir);
} catch (err) {
- return mkdir3.mkdirsSync(dir);
+ return mkdir4.mkdirsSync(dir);
}
items.forEach((item) => {
- item = path8.join(dir, item);
+ item = path10.join(dir, item);
remove.removeSync(item);
});
}
@@ -11207,28 +11231,28 @@ var require_file = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/ensure/file.js"(exports, module2) {
"use strict";
var u = require_universalify().fromCallback;
- var path8 = require("path");
- var fs6 = require_graceful_fs();
- var mkdir3 = require_mkdirs2();
+ var path10 = require("path");
+ var fs8 = require_graceful_fs();
+ var mkdir4 = require_mkdirs2();
var pathExists = require_path_exists().pathExists;
function createFile(file, callback) {
function makeFile() {
- fs6.writeFile(file, "", (err) => {
+ fs8.writeFile(file, "", (err) => {
if (err)
return callback(err);
callback();
});
}
- fs6.stat(file, (err, stats) => {
+ fs8.stat(file, (err, stats) => {
if (!err && stats.isFile())
return callback();
- const dir = path8.dirname(file);
+ const dir = path10.dirname(file);
pathExists(dir, (err2, dirExists) => {
if (err2)
return callback(err2);
if (dirExists)
return makeFile();
- mkdir3.mkdirs(dir, (err3) => {
+ mkdir4.mkdirs(dir, (err3) => {
if (err3)
return callback(err3);
makeFile();
@@ -11239,16 +11263,16 @@ var require_file = __commonJS({
function createFileSync(file) {
let stats;
try {
- stats = fs6.statSync(file);
+ stats = fs8.statSync(file);
} catch (e) {
}
if (stats && stats.isFile())
return;
- const dir = path8.dirname(file);
- if (!fs6.existsSync(dir)) {
- mkdir3.mkdirsSync(dir);
+ const dir = path10.dirname(file);
+ if (!fs8.existsSync(dir)) {
+ mkdir4.mkdirsSync(dir);
}
- fs6.writeFileSync(file, "");
+ fs8.writeFileSync(file, "");
}
module2.exports = {
createFile: u(createFile),
@@ -11262,13 +11286,13 @@ var require_link = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/ensure/link.js"(exports, module2) {
"use strict";
var u = require_universalify().fromCallback;
- var path8 = require("path");
- var fs6 = require_graceful_fs();
- var mkdir3 = require_mkdirs2();
+ var path10 = require("path");
+ var fs8 = require_graceful_fs();
+ var mkdir4 = require_mkdirs2();
var pathExists = require_path_exists().pathExists;
function createLink(srcpath, dstpath, callback) {
function makeLink(srcpath2, dstpath2) {
- fs6.link(srcpath2, dstpath2, (err) => {
+ fs8.link(srcpath2, dstpath2, (err) => {
if (err)
return callback(err);
callback(null);
@@ -11279,18 +11303,18 @@ var require_link = __commonJS({
return callback(err);
if (destinationExists)
return callback(null);
- fs6.lstat(srcpath, (err2) => {
+ fs8.lstat(srcpath, (err2) => {
if (err2) {
err2.message = err2.message.replace("lstat", "ensureLink");
return callback(err2);
}
- const dir = path8.dirname(dstpath);
+ const dir = path10.dirname(dstpath);
pathExists(dir, (err3, dirExists) => {
if (err3)
return callback(err3);
if (dirExists)
return makeLink(srcpath, dstpath);
- mkdir3.mkdirs(dir, (err4) => {
+ mkdir4.mkdirs(dir, (err4) => {
if (err4)
return callback(err4);
makeLink(srcpath, dstpath);
@@ -11300,21 +11324,21 @@ var require_link = __commonJS({
});
}
function createLinkSync(srcpath, dstpath) {
- const destinationExists = fs6.existsSync(dstpath);
+ const destinationExists = fs8.existsSync(dstpath);
if (destinationExists)
return void 0;
try {
- fs6.lstatSync(srcpath);
+ fs8.lstatSync(srcpath);
} catch (err) {
err.message = err.message.replace("lstat", "ensureLink");
throw err;
}
- const dir = path8.dirname(dstpath);
- const dirExists = fs6.existsSync(dir);
+ const dir = path10.dirname(dstpath);
+ const dirExists = fs8.existsSync(dir);
if (dirExists)
- return fs6.linkSync(srcpath, dstpath);
- mkdir3.mkdirsSync(dir);
- return fs6.linkSync(srcpath, dstpath);
+ return fs8.linkSync(srcpath, dstpath);
+ mkdir4.mkdirsSync(dir);
+ return fs8.linkSync(srcpath, dstpath);
}
module2.exports = {
createLink: u(createLink),
@@ -11327,12 +11351,12 @@ var require_link = __commonJS({
var require_symlink_paths = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/ensure/symlink-paths.js"(exports, module2) {
"use strict";
- var path8 = require("path");
- var fs6 = require_graceful_fs();
+ var path10 = require("path");
+ var fs8 = require_graceful_fs();
var pathExists = require_path_exists().pathExists;
function symlinkPaths(srcpath, dstpath, callback) {
- if (path8.isAbsolute(srcpath)) {
- return fs6.lstat(srcpath, (err) => {
+ if (path10.isAbsolute(srcpath)) {
+ return fs8.lstat(srcpath, (err) => {
if (err) {
err.message = err.message.replace("lstat", "ensureSymlink");
return callback(err);
@@ -11343,8 +11367,8 @@ var require_symlink_paths = __commonJS({
});
});
} else {
- const dstdir = path8.dirname(dstpath);
- const relativeToDst = path8.join(dstdir, srcpath);
+ const dstdir = path10.dirname(dstpath);
+ const relativeToDst = path10.join(dstdir, srcpath);
return pathExists(relativeToDst, (err, exists) => {
if (err)
return callback(err);
@@ -11354,14 +11378,14 @@ var require_symlink_paths = __commonJS({
"toDst": srcpath
});
} else {
- return fs6.lstat(srcpath, (err2) => {
+ return fs8.lstat(srcpath, (err2) => {
if (err2) {
err2.message = err2.message.replace("lstat", "ensureSymlink");
return callback(err2);
}
return callback(null, {
"toCwd": srcpath,
- "toDst": path8.relative(dstdir, srcpath)
+ "toDst": path10.relative(dstdir, srcpath)
});
});
}
@@ -11370,8 +11394,8 @@ var require_symlink_paths = __commonJS({
}
function symlinkPathsSync(srcpath, dstpath) {
let exists;
- if (path8.isAbsolute(srcpath)) {
- exists = fs6.existsSync(srcpath);
+ if (path10.isAbsolute(srcpath)) {
+ exists = fs8.existsSync(srcpath);
if (!exists)
throw new Error("absolute srcpath does not exist");
return {
@@ -11379,21 +11403,21 @@ var require_symlink_paths = __commonJS({
"toDst": srcpath
};
} else {
- const dstdir = path8.dirname(dstpath);
- const relativeToDst = path8.join(dstdir, srcpath);
- exists = fs6.existsSync(relativeToDst);
+ const dstdir = path10.dirname(dstpath);
+ const relativeToDst = path10.join(dstdir, srcpath);
+ exists = fs8.existsSync(relativeToDst);
if (exists) {
return {
"toCwd": relativeToDst,
"toDst": srcpath
};
} else {
- exists = fs6.existsSync(srcpath);
+ exists = fs8.existsSync(srcpath);
if (!exists)
throw new Error("relative srcpath does not exist");
return {
"toCwd": srcpath,
- "toDst": path8.relative(dstdir, srcpath)
+ "toDst": path10.relative(dstdir, srcpath)
};
}
}
@@ -11409,13 +11433,13 @@ var require_symlink_paths = __commonJS({
var require_symlink_type = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/ensure/symlink-type.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
+ var fs8 = require_graceful_fs();
function symlinkType(srcpath, type, callback) {
callback = typeof type === "function" ? type : callback;
type = typeof type === "function" ? false : type;
if (type)
return callback(null, type);
- fs6.lstat(srcpath, (err, stats) => {
+ fs8.lstat(srcpath, (err, stats) => {
if (err)
return callback(null, "file");
type = stats && stats.isDirectory() ? "dir" : "file";
@@ -11427,7 +11451,7 @@ var require_symlink_type = __commonJS({
if (type)
return type;
try {
- stats = fs6.lstatSync(srcpath);
+ stats = fs8.lstatSync(srcpath);
} catch (e) {
return "file";
}
@@ -11445,8 +11469,8 @@ var require_symlink = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/ensure/symlink.js"(exports, module2) {
"use strict";
var u = require_universalify().fromCallback;
- var path8 = require("path");
- var fs6 = require_graceful_fs();
+ var path10 = require("path");
+ var fs8 = require_graceful_fs();
var _mkdirs = require_mkdirs2();
var mkdirs = _mkdirs.mkdirs;
var mkdirsSync = _mkdirs.mkdirsSync;
@@ -11472,16 +11496,16 @@ var require_symlink = __commonJS({
symlinkType(relative.toCwd, type, (err3, type2) => {
if (err3)
return callback(err3);
- const dir = path8.dirname(dstpath);
+ const dir = path10.dirname(dstpath);
pathExists(dir, (err4, dirExists) => {
if (err4)
return callback(err4);
if (dirExists)
- return fs6.symlink(srcpath, dstpath, type2, callback);
+ return fs8.symlink(srcpath, dstpath, type2, callback);
mkdirs(dir, (err5) => {
if (err5)
return callback(err5);
- fs6.symlink(srcpath, dstpath, type2, callback);
+ fs8.symlink(srcpath, dstpath, type2, callback);
});
});
});
@@ -11489,18 +11513,18 @@ var require_symlink = __commonJS({
});
}
function createSymlinkSync(srcpath, dstpath, type) {
- const destinationExists = fs6.existsSync(dstpath);
+ const destinationExists = fs8.existsSync(dstpath);
if (destinationExists)
return void 0;
const relative = symlinkPathsSync(srcpath, dstpath);
srcpath = relative.toDst;
type = symlinkTypeSync(relative.toCwd, type);
- const dir = path8.dirname(dstpath);
- const exists = fs6.existsSync(dir);
+ const dir = path10.dirname(dstpath);
+ const exists = fs8.existsSync(dir);
if (exists)
- return fs6.symlinkSync(srcpath, dstpath, type);
+ return fs8.symlinkSync(srcpath, dstpath, type);
mkdirsSync(dir);
- return fs6.symlinkSync(srcpath, dstpath, type);
+ return fs8.symlinkSync(srcpath, dstpath, type);
}
module2.exports = {
createSymlink: u(createSymlink),
@@ -11554,12 +11578,12 @@ var require_jsonfile = __commonJS({
options = { encoding: options };
}
options = options || {};
- var fs6 = options.fs || _fs;
+ var fs8 = options.fs || _fs;
var shouldThrow = true;
if ("throws" in options) {
shouldThrow = options.throws;
}
- fs6.readFile(file, options, function(err, data) {
+ fs8.readFile(file, options, function(err, data) {
if (err)
return callback(err);
data = stripBom(data);
@@ -11582,13 +11606,13 @@ var require_jsonfile = __commonJS({
if (typeof options === "string") {
options = { encoding: options };
}
- var fs6 = options.fs || _fs;
+ var fs8 = options.fs || _fs;
var shouldThrow = true;
if ("throws" in options) {
shouldThrow = options.throws;
}
try {
- var content = fs6.readFileSync(file, options);
+ var content = fs8.readFileSync(file, options);
content = stripBom(content);
return JSON.parse(content, options.reviver);
} catch (err) {
@@ -11620,7 +11644,7 @@ var require_jsonfile = __commonJS({
options = {};
}
options = options || {};
- var fs6 = options.fs || _fs;
+ var fs8 = options.fs || _fs;
var str = "";
try {
str = stringify(obj, options);
@@ -11629,13 +11653,13 @@ var require_jsonfile = __commonJS({
callback(err, null);
return;
}
- fs6.writeFile(file, str, options, callback);
+ fs8.writeFile(file, str, options, callback);
}
function writeFileSync(file, obj, options) {
options = options || {};
- var fs6 = options.fs || _fs;
+ var fs8 = options.fs || _fs;
var str = stringify(obj, options);
- return fs6.writeFileSync(file, str, options);
+ return fs8.writeFileSync(file, str, options);
}
function stripBom(content) {
if (Buffer.isBuffer(content))
@@ -11673,8 +11697,8 @@ var require_jsonfile2 = __commonJS({
var require_output_json = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/json/output-json.js"(exports, module2) {
"use strict";
- var path8 = require("path");
- var mkdir3 = require_mkdirs2();
+ var path10 = require("path");
+ var mkdir4 = require_mkdirs2();
var pathExists = require_path_exists().pathExists;
var jsonFile = require_jsonfile2();
function outputJson(file, data, options, callback) {
@@ -11682,13 +11706,13 @@ var require_output_json = __commonJS({
callback = options;
options = {};
}
- const dir = path8.dirname(file);
+ const dir = path10.dirname(file);
pathExists(dir, (err, itDoes) => {
if (err)
return callback(err);
if (itDoes)
return jsonFile.writeJson(file, data, options, callback);
- mkdir3.mkdirs(dir, (err2) => {
+ mkdir4.mkdirs(dir, (err2) => {
if (err2)
return callback(err2);
jsonFile.writeJson(file, data, options, callback);
@@ -11703,14 +11727,14 @@ var require_output_json = __commonJS({
var require_output_json_sync = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/json/output-json-sync.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
- var mkdir3 = require_mkdirs2();
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
+ var mkdir4 = require_mkdirs2();
var jsonFile = require_jsonfile2();
function outputJsonSync(file, data, options) {
- const dir = path8.dirname(file);
- if (!fs6.existsSync(dir)) {
- mkdir3.mkdirsSync(dir);
+ const dir = path10.dirname(file);
+ if (!fs8.existsSync(dir)) {
+ mkdir4.mkdirsSync(dir);
}
jsonFile.writeJsonSync(file, data, options);
}
@@ -11740,8 +11764,8 @@ var require_json = __commonJS({
var require_move_sync = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/move-sync/move-sync.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var copySync = require_copy_sync2().copySync;
var removeSync = require_remove().removeSync;
var mkdirpSync = require_mkdirs2().mkdirpSync;
@@ -11751,7 +11775,7 @@ var require_move_sync = __commonJS({
const overwrite = opts.overwrite || opts.clobber || false;
const { srcStat } = stat.checkPathsSync(src, dest, "move");
stat.checkParentPathsSync(src, srcStat, dest, "move");
- mkdirpSync(path8.dirname(dest));
+ mkdirpSync(path10.dirname(dest));
return doRename(src, dest, overwrite);
}
function doRename(src, dest, overwrite) {
@@ -11759,13 +11783,13 @@ var require_move_sync = __commonJS({
removeSync(dest);
return rename(src, dest, overwrite);
}
- if (fs6.existsSync(dest))
+ if (fs8.existsSync(dest))
throw new Error("dest already exists.");
return rename(src, dest, overwrite);
}
function rename(src, dest, overwrite) {
try {
- fs6.renameSync(src, dest);
+ fs8.renameSync(src, dest);
} catch (err) {
if (err.code !== "EXDEV")
throw err;
@@ -11798,8 +11822,8 @@ var require_move_sync2 = __commonJS({
var require_move = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/move/move.js"(exports, module2) {
"use strict";
- var fs6 = require_graceful_fs();
- var path8 = require("path");
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
var copy = require_copy2().copy;
var remove = require_remove().remove;
var mkdirp = require_mkdirs2().mkdirp;
@@ -11818,7 +11842,7 @@ var require_move = __commonJS({
stat.checkParentPaths(src, srcStat, dest, "move", (err2) => {
if (err2)
return cb(err2);
- mkdirp(path8.dirname(dest), (err3) => {
+ mkdirp(path10.dirname(dest), (err3) => {
if (err3)
return cb(err3);
return doRename(src, dest, overwrite, cb);
@@ -11843,7 +11867,7 @@ var require_move = __commonJS({
});
}
function rename(src, dest, overwrite, cb) {
- fs6.rename(src, dest, (err) => {
+ fs8.rename(src, dest, (err) => {
if (!err)
return cb();
if (err.code !== "EXDEV")
@@ -11882,35 +11906,35 @@ var require_output = __commonJS({
".yarn/cache/fs-extra-npm-8.1.0-197473387f-cfdc1f2b8d.zip/node_modules/fs-extra/lib/output/index.js"(exports, module2) {
"use strict";
var u = require_universalify().fromCallback;
- var fs6 = require_graceful_fs();
- var path8 = require("path");
- var mkdir3 = require_mkdirs2();
+ var fs8 = require_graceful_fs();
+ var path10 = require("path");
+ var mkdir4 = require_mkdirs2();
var pathExists = require_path_exists().pathExists;
function outputFile(file, data, encoding, callback) {
if (typeof encoding === "function") {
callback = encoding;
encoding = "utf8";
}
- const dir = path8.dirname(file);
+ const dir = path10.dirname(file);
pathExists(dir, (err, itDoes) => {
if (err)
return callback(err);
if (itDoes)
- return fs6.writeFile(file, data, encoding, callback);
- mkdir3.mkdirs(dir, (err2) => {
+ return fs8.writeFile(file, data, encoding, callback);
+ mkdir4.mkdirs(dir, (err2) => {
if (err2)
return callback(err2);
- fs6.writeFile(file, data, encoding, callback);
+ fs8.writeFile(file, data, encoding, callback);
});
});
}
function outputFileSync(file, ...args) {
- const dir = path8.dirname(file);
- if (fs6.existsSync(dir)) {
- return fs6.writeFileSync(file, ...args);
+ const dir = path10.dirname(file);
+ if (fs8.existsSync(dir)) {
+ return fs8.writeFileSync(file, ...args);
}
- mkdir3.mkdirsSync(dir);
- fs6.writeFileSync(file, ...args);
+ mkdir4.mkdirsSync(dir);
+ fs8.writeFileSync(file, ...args);
}
module2.exports = {
outputFile: u(outputFile),
@@ -11940,11 +11964,11 @@ var require_lib = __commonJS({
require_path_exists(),
require_remove()
);
- var fs6 = require("fs");
- if (Object.getOwnPropertyDescriptor(fs6, "promises")) {
+ var fs8 = require("fs");
+ if (Object.getOwnPropertyDescriptor(fs8, "promises")) {
Object.defineProperty(module2.exports, "promises", {
get() {
- return fs6.promises;
+ return fs8.promises;
}
});
}
@@ -13446,8 +13470,8 @@ var require_Client = __commonJS({
/**
* Set the working directory.
*/
- async cd(path8) {
- const validPath = await this.protectWhitespace(path8);
+ async cd(path10) {
+ const validPath = await this.protectWhitespace(path10);
return this.send("CWD " + validPath);
}
/**
@@ -13460,8 +13484,8 @@ var require_Client = __commonJS({
* Get the last modified time of a file. This is not supported by every FTP server, in which case
* calling this method will throw an exception.
*/
- async lastMod(path8) {
- const validPath = await this.protectWhitespace(path8);
+ async lastMod(path10) {
+ const validPath = await this.protectWhitespace(path10);
const res = await this.send(`MDTM ${validPath}`);
const date = res.message.slice(4);
return (0, parseListMLSD_1.parseMLSxDate)(date);
@@ -13469,8 +13493,8 @@ var require_Client = __commonJS({
/**
* Get the size of a file.
*/
- async size(path8) {
- const validPath = await this.protectWhitespace(path8);
+ async size(path10) {
+ const validPath = await this.protectWhitespace(path10);
const command = `SIZE ${validPath}`;
const res = await this.send(command);
const size = parseInt(res.message.slice(4), 10);
@@ -13497,8 +13521,8 @@ var require_Client = __commonJS({
* You can ignore FTP error return codes which won't throw an exception if e.g.
* the file doesn't exist.
*/
- async remove(path8, ignoreErrorCodes = false) {
- const validPath = await this.protectWhitespace(path8);
+ async remove(path10, ignoreErrorCodes = false) {
+ const validPath = await this.protectWhitespace(path10);
if (ignoreErrorCodes) {
return this.sendIgnoringError(`DELE ${validPath}`);
}
@@ -13652,8 +13676,8 @@ var require_Client = __commonJS({
*
* @param [path] Path to remote file or directory.
*/
- async list(path8 = "") {
- const validPath = await this.protectWhitespace(path8);
+ async list(path10 = "") {
+ const validPath = await this.protectWhitespace(path10);
let lastError;
for (const candidate of this.availableListCommands) {
const command = validPath === "" ? candidate : `${candidate} ${validPath}`;
@@ -13815,21 +13839,21 @@ var require_Client = __commonJS({
/**
* Remove an empty directory, will fail if not empty.
*/
- async removeEmptyDir(path8) {
- const validPath = await this.protectWhitespace(path8);
+ async removeEmptyDir(path10) {
+ const validPath = await this.protectWhitespace(path10);
return this.send(`RMD ${validPath}`);
}
/**
* FTP servers can't handle filenames that have leading whitespace. This method transforms
* a given path to fix that issue for most cases.
*/
- async protectWhitespace(path8) {
- if (!path8.startsWith(" ")) {
- return path8;
+ async protectWhitespace(path10) {
+ if (!path10.startsWith(" ")) {
+ return path10;
}
const pwd = await this.pwd();
const absolutePathPrefix = pwd.endsWith("/") ? pwd : pwd + "/";
- return absolutePathPrefix + path8;
+ return absolutePathPrefix + path10;
}
async _exitAtCurrentDirectory(func) {
const userDir = await this.pwd();
@@ -13906,11 +13930,11 @@ var require_Client = __commonJS({
}
};
exports.Client = Client;
- async function ensureLocalDirectory(path8) {
+ async function ensureLocalDirectory(path10) {
try {
- await fsStat(path8);
+ await fsStat(path10);
} catch (err) {
- await fsMkDir(path8, { recursive: true });
+ await fsMkDir(path10, { recursive: true });
}
}
async function ignoreError(func) {
@@ -14548,23 +14572,23 @@ var require_estraverse = __commonJS({
return false;
}
};
- function Element(node, path8, wrap, ref) {
+ function Element(node, path10, wrap, ref) {
this.node = node;
- this.path = path8;
+ this.path = path10;
this.wrap = wrap;
this.ref = ref;
}
function Controller() {
}
- Controller.prototype.path = function path8() {
+ Controller.prototype.path = function path10() {
var i, iz, j, jz, result, element;
- function addToPath(result2, path9) {
- if (Array.isArray(path9)) {
- for (j = 0, jz = path9.length; j < jz; ++j) {
- result2.push(path9[j]);
+ function addToPath(result2, path11) {
+ if (Array.isArray(path11)) {
+ for (j = 0, jz = path11.length; j < jz; ++j) {
+ result2.push(path11[j]);
}
} else {
- result2.push(path9);
+ result2.push(path11);
}
}
if (!this.__current.path) {
@@ -15436,16 +15460,16 @@ var require_util2 = __commonJS({
}
exports.urlGenerate = urlGenerate;
function normalize(aPath) {
- var path8 = aPath;
+ var path10 = aPath;
var url = urlParse(aPath);
if (url) {
if (!url.path) {
return aPath;
}
- path8 = url.path;
+ path10 = url.path;
}
- var isAbsolute = exports.isAbsolute(path8);
- var parts = path8.split(/\/+/);
+ var isAbsolute = exports.isAbsolute(path10);
+ var parts = path10.split(/\/+/);
for (var part, up = 0, i = parts.length - 1; i >= 0; i--) {
part = parts[i];
if (part === ".") {
@@ -15462,15 +15486,15 @@ var require_util2 = __commonJS({
}
}
}
- path8 = parts.join("/");
- if (path8 === "") {
- path8 = isAbsolute ? "/" : ".";
+ path10 = parts.join("/");
+ if (path10 === "") {
+ path10 = isAbsolute ? "/" : ".";
}
if (url) {
- url.path = path8;
+ url.path = path10;
return urlGenerate(url);
}
- return path8;
+ return path10;
}
exports.normalize = normalize;
function join2(aRoot, aPath) {
@@ -25397,9 +25421,10 @@ var require_esprima = __commonJS({
}
});
-// .yarn/cache/tslib-npm-2.5.2-3f1b58afbb-ed22e23f3d.zip/node_modules/tslib/tslib.es6.js
+// .yarn/cache/tslib-npm-2.6.0-4d336a6824-702dfe42c8.zip/node_modules/tslib/tslib.es6.mjs
var tslib_es6_exports = {};
__export(tslib_es6_exports, {
+ __addDisposableResource: () => __addDisposableResource,
__assign: () => __assign,
__asyncDelegator: () => __asyncDelegator,
__asyncGenerator: () => __asyncGenerator,
@@ -25411,6 +25436,7 @@ __export(tslib_es6_exports, {
__classPrivateFieldSet: () => __classPrivateFieldSet,
__createBinding: () => __createBinding,
__decorate: () => __decorate,
+ __disposeResources: () => __disposeResources,
__esDecorate: () => __esDecorate,
__exportStar: () => __exportStar,
__extends: () => __extends,
@@ -25813,9 +25839,56 @@ function __classPrivateFieldIn(state, receiver) {
throw new TypeError("Cannot use 'in' operator on non-object");
return typeof state === "function" ? receiver === state : state.has(receiver);
}
-var extendStatics, __assign, __createBinding, __setModuleDefault, tslib_es6_default;
+function __addDisposableResource(env2, value, async) {
+ if (value !== null && value !== void 0) {
+ if (typeof value !== "object")
+ throw new TypeError("Object expected.");
+ var dispose;
+ if (async) {
+ if (!Symbol.asyncDispose)
+ throw new TypeError("Symbol.asyncDispose is not defined.");
+ dispose = value[Symbol.asyncDispose];
+ }
+ if (dispose === void 0) {
+ if (!Symbol.dispose)
+ throw new TypeError("Symbol.dispose is not defined.");
+ dispose = value[Symbol.dispose];
+ }
+ if (typeof dispose !== "function")
+ throw new TypeError("Object not disposable.");
+ env2.stack.push({ value, dispose, async });
+ } else if (async) {
+ env2.stack.push({ async: true });
+ }
+ return value;
+}
+function __disposeResources(env2) {
+ function fail(e) {
+ env2.error = env2.hasError ? new _SuppressedError(e, env2.error, "An error was suppressed during disposal.") : e;
+ env2.hasError = true;
+ }
+ function next() {
+ while (env2.stack.length) {
+ var rec = env2.stack.pop();
+ try {
+ var result = rec.dispose && rec.dispose.call(rec.value);
+ if (rec.async)
+ return Promise.resolve(result).then(next, function(e) {
+ fail(e);
+ return next();
+ });
+ } catch (e) {
+ fail(e);
+ }
+ }
+ if (env2.hasError)
+ throw env2.error;
+ }
+ return next();
+}
+var extendStatics, __assign, __createBinding, __setModuleDefault, _SuppressedError, tslib_es6_default;
var init_tslib_es6 = __esm({
- ".yarn/cache/tslib-npm-2.5.2-3f1b58afbb-ed22e23f3d.zip/node_modules/tslib/tslib.es6.js"() {
+ ".yarn/cache/tslib-npm-2.6.0-4d336a6824-702dfe42c8.zip/node_modules/tslib/tslib.es6.mjs"() {
extendStatics = function(d, b) {
extendStatics = Object.setPrototypeOf || { __proto__: [] } instanceof Array && function(d2, b2) {
d2.__proto__ = b2;
@@ -25858,6 +25931,10 @@ var init_tslib_es6 = __esm({
} : function(o, v) {
o["default"] = v;
};
+ _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function(error, suppressed, message) {
+ var e = new Error(message);
+ return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e;
+ };
tslib_es6_default = {
__extends,
__assign,
@@ -25883,7 +25960,9 @@ var init_tslib_es6 = __esm({
__importDefault,
__classPrivateFieldGet,
__classPrivateFieldSet,
- __classPrivateFieldIn
+ __classPrivateFieldIn,
+ __addDisposableResource,
+ __disposeResources
};
}
});
@@ -26604,16 +26683,16 @@ var require_path = __commonJS({
this.__childCache = null;
};
var Pp = Path.prototype;
- function getChildCache(path8) {
- return path8.__childCache || (path8.__childCache = /* @__PURE__ */ Object.create(null));
+ function getChildCache(path10) {
+ return path10.__childCache || (path10.__childCache = /* @__PURE__ */ Object.create(null));
}
- function getChildPath(path8, name) {
- var cache = getChildCache(path8);
- var actualChildValue = path8.getValueProperty(name);
+ function getChildPath(path10, name) {
+ var cache = getChildCache(path10);
+ var actualChildValue = path10.getValueProperty(name);
var childPath = cache[name];
if (!hasOwn.call(cache, name) || // Ensure consistency between cache and reality.
childPath.value !== actualChildValue) {
- childPath = cache[name] = new path8.constructor(actualChildValue, path8, name);
+ childPath = cache[name] = new path10.constructor(actualChildValue, path10, name);
}
return childPath;
}
@@ -26625,12 +26704,12 @@ var require_path = __commonJS({
for (var _i = 0; _i < arguments.length; _i++) {
names[_i] = arguments[_i];
}
- var path8 = this;
+ var path10 = this;
var count = names.length;
for (var i = 0; i < count; ++i) {
- path8 = getChildPath(path8, names[i]);
+ path10 = getChildPath(path10, names[i]);
}
- return path8;
+ return path10;
};
Pp.each = function each(callback, context) {
var childPaths = [];
@@ -26666,12 +26745,12 @@ var require_path = __commonJS({
};
function emptyMoves() {
}
- function getMoves(path8, offset, start, end) {
- isArray2.assert(path8.value);
+ function getMoves(path10, offset, start, end) {
+ isArray2.assert(path10.value);
if (offset === 0) {
return emptyMoves;
}
- var length = path8.value.length;
+ var length = path10.value.length;
if (length < 1) {
return emptyMoves;
}
@@ -26689,10 +26768,10 @@ var require_path = __commonJS({
isNumber2.assert(start);
isNumber2.assert(end);
var moves = /* @__PURE__ */ Object.create(null);
- var cache = getChildCache(path8);
+ var cache = getChildCache(path10);
for (var i = start; i < end; ++i) {
- if (hasOwn.call(path8.value, i)) {
- var childPath = path8.get(i);
+ if (hasOwn.call(path10.value, i)) {
+ var childPath = path10.get(i);
if (childPath.name !== i) {
throw new Error("");
}
@@ -26710,7 +26789,7 @@ var require_path = __commonJS({
throw new Error("");
}
cache[newIndex2] = childPath2;
- path8.value[newIndex2] = childPath2.value;
+ path10.value[newIndex2] = childPath2.value;
}
};
}
@@ -26785,34 +26864,34 @@ var require_path = __commonJS({
}
return pp.insertAt.apply(pp, insertAtArgs);
};
- function repairRelationshipWithParent(path8) {
- if (!(path8 instanceof Path)) {
+ function repairRelationshipWithParent(path10) {
+ if (!(path10 instanceof Path)) {
throw new Error("");
}
- var pp = path8.parentPath;
+ var pp = path10.parentPath;
if (!pp) {
- return path8;
+ return path10;
}
var parentValue = pp.value;
var parentCache = getChildCache(pp);
- if (parentValue[path8.name] === path8.value) {
- parentCache[path8.name] = path8;
+ if (parentValue[path10.name] === path10.value) {
+ parentCache[path10.name] = path10;
} else if (isArray2.check(parentValue)) {
- var i = parentValue.indexOf(path8.value);
+ var i = parentValue.indexOf(path10.value);
if (i >= 0) {
- parentCache[path8.name = i] = path8;
+ parentCache[path10.name = i] = path10;
}
} else {
- parentValue[path8.name] = path8.value;
- parentCache[path8.name] = path8;
+ parentValue[path10.name] = path10.value;
+ parentCache[path10.name] = path10;
}
- if (parentValue[path8.name] !== path8.value) {
+ if (parentValue[path10.name] !== path10.value) {
throw new Error("");
}
- if (path8.parentPath.get(path8.name) !== path8) {
+ if (path10.parentPath.get(path10.name) !== path10) {
throw new Error("");
}
- return path8;
+ return path10;
}
Pp.replace = function replace(replacement) {
var results = [];
@@ -26892,11 +26971,11 @@ var require_scope = __commonJS({
var Expression = namedTypes.Expression;
var isArray2 = types.builtInTypes.array;
var b = types.builders;
- var Scope = function Scope2(path8, parentScope) {
+ var Scope = function Scope2(path10, parentScope) {
if (!(this instanceof Scope2)) {
throw new Error("Scope constructor cannot be invoked without 'new'");
}
- ScopeType.assert(path8.value);
+ ScopeType.assert(path10.value);
var depth;
if (parentScope) {
if (!(parentScope instanceof Scope2)) {
@@ -26908,8 +26987,8 @@ var require_scope = __commonJS({
depth = 0;
}
Object.defineProperties(this, {
- path: { value: path8 },
- node: { value: path8.value },
+ path: { value: path10 },
+ node: { value: path10.value },
isGlobal: { value: !parentScope, enumerable: true },
depth: { value: depth },
parent: { value: parentScope },
@@ -26984,50 +27063,50 @@ var require_scope = __commonJS({
this.scan();
return this.types;
};
- function scanScope(path8, bindings, scopeTypes2) {
- var node = path8.value;
+ function scanScope(path10, bindings, scopeTypes2) {
+ var node = path10.value;
ScopeType.assert(node);
if (namedTypes.CatchClause.check(node)) {
- var param = path8.get("param");
+ var param = path10.get("param");
if (param.value) {
addPattern(param, bindings);
}
} else {
- recursiveScanScope(path8, bindings, scopeTypes2);
+ recursiveScanScope(path10, bindings, scopeTypes2);
}
}
- function recursiveScanScope(path8, bindings, scopeTypes2) {
- var node = path8.value;
- if (path8.parent && namedTypes.FunctionExpression.check(path8.parent.node) && path8.parent.node.id) {
- addPattern(path8.parent.get("id"), bindings);
+ function recursiveScanScope(path10, bindings, scopeTypes2) {
+ var node = path10.value;
+ if (path10.parent && namedTypes.FunctionExpression.check(path10.parent.node) && path10.parent.node.id) {
+ addPattern(path10.parent.get("id"), bindings);
}
if (!node) {
} else if (isArray2.check(node)) {
- path8.each(function(childPath) {
+ path10.each(function(childPath) {
recursiveScanChild(childPath, bindings, scopeTypes2);
});
} else if (namedTypes.Function.check(node)) {
- path8.get("params").each(function(paramPath) {
+ path10.get("params").each(function(paramPath) {
addPattern(paramPath, bindings);
});
- recursiveScanChild(path8.get("body"), bindings, scopeTypes2);
+ recursiveScanChild(path10.get("body"), bindings, scopeTypes2);
} else if (namedTypes.TypeAlias && namedTypes.TypeAlias.check(node) || namedTypes.InterfaceDeclaration && namedTypes.InterfaceDeclaration.check(node) || namedTypes.TSTypeAliasDeclaration && namedTypes.TSTypeAliasDeclaration.check(node) || namedTypes.TSInterfaceDeclaration && namedTypes.TSInterfaceDeclaration.check(node)) {
- addTypePattern(path8.get("id"), scopeTypes2);
+ addTypePattern(path10.get("id"), scopeTypes2);
} else if (namedTypes.VariableDeclarator.check(node)) {
- addPattern(path8.get("id"), bindings);
- recursiveScanChild(path8.get("init"), bindings, scopeTypes2);
+ addPattern(path10.get("id"), bindings);
+ recursiveScanChild(path10.get("init"), bindings, scopeTypes2);
} else if (node.type === "ImportSpecifier" || node.type === "ImportNamespaceSpecifier" || node.type === "ImportDefaultSpecifier") {
addPattern(
// Esprima used to use the .name field to refer to the local
// binding identifier for ImportSpecifier nodes, but .id for
// ImportNamespaceSpecifier and ImportDefaultSpecifier nodes.
// ESTree/Acorn/ESpree use .local for all three node types.
- path8.get(node.local ? "local" : node.name ? "name" : "id"),
+ path10.get(node.local ? "local" : node.name ? "name" : "id"),
bindings
);
} else if (Node.check(node) && !Expression.check(node)) {
types.eachField(node, function(name, child) {
- var childPath = path8.get(name);
+ var childPath = path10.get(name);
if (!pathHasValue(childPath, child)) {
throw new Error("");
}
@@ -27035,34 +27114,34 @@ var require_scope = __commonJS({
});
}
}
- function pathHasValue(path8, value) {
- if (path8.value === value) {
+ function pathHasValue(path10, value) {
+ if (path10.value === value) {
return true;
}
- if (Array.isArray(path8.value) && path8.value.length === 0 && Array.isArray(value) && value.length === 0) {
+ if (Array.isArray(path10.value) && path10.value.length === 0 && Array.isArray(value) && value.length === 0) {
return true;
}
return false;
}
- function recursiveScanChild(path8, bindings, scopeTypes2) {
- var node = path8.value;
+ function recursiveScanChild(path10, bindings, scopeTypes2) {
+ var node = path10.value;
if (!node || Expression.check(node)) {
} else if (namedTypes.FunctionDeclaration.check(node) && node.id !== null) {
- addPattern(path8.get("id"), bindings);
+ addPattern(path10.get("id"), bindings);
} else if (namedTypes.ClassDeclaration && namedTypes.ClassDeclaration.check(node)) {
- addPattern(path8.get("id"), bindings);
+ addPattern(path10.get("id"), bindings);
} else if (ScopeType.check(node)) {
if (namedTypes.CatchClause.check(node) && // TODO Broaden this to accept any pattern.
namedTypes.Identifier.check(node.param)) {
var catchParamName = node.param.name;
var hadBinding = hasOwn.call(bindings, catchParamName);
- recursiveScanScope(path8.get("body"), bindings, scopeTypes2);
+ recursiveScanScope(path10.get("body"), bindings, scopeTypes2);
if (!hadBinding) {
delete bindings[catchParamName];
}
}
} else {
- recursiveScanScope(path8, bindings, scopeTypes2);
+ recursiveScanScope(path10, bindings, scopeTypes2);
}
}
function addPattern(patternPath, bindings) {
@@ -27398,53 +27477,53 @@ var require_node_path = __commonJS({
NPp.firstInStatement = function() {
return firstInStatement(this);
};
- function firstInStatement(path8) {
- for (var node, parent; path8.parent; path8 = path8.parent) {
- node = path8.node;
- parent = path8.parent.node;
- if (n.BlockStatement.check(parent) && path8.parent.name === "body" && path8.name === 0) {
+ function firstInStatement(path10) {
+ for (var node, parent; path10.parent; path10 = path10.parent) {
+ node = path10.node;
+ parent = path10.parent.node;
+ if (n.BlockStatement.check(parent) && path10.parent.name === "body" && path10.name === 0) {
if (parent.body[0] !== node) {
throw new Error("Nodes must be equal");
}
return true;
}
- if (n.ExpressionStatement.check(parent) && path8.name === "expression") {
+ if (n.ExpressionStatement.check(parent) && path10.name === "expression") {
if (parent.expression !== node) {
throw new Error("Nodes must be equal");
}
return true;
}
- if (n.SequenceExpression.check(parent) && path8.parent.name === "expressions" && path8.name === 0) {
+ if (n.SequenceExpression.check(parent) && path10.parent.name === "expressions" && path10.name === 0) {
if (parent.expressions[0] !== node) {
throw new Error("Nodes must be equal");
}
continue;
}
- if (n.CallExpression.check(parent) && path8.name === "callee") {
+ if (n.CallExpression.check(parent) && path10.name === "callee") {
if (parent.callee !== node) {
throw new Error("Nodes must be equal");
}
continue;
}
- if (n.MemberExpression.check(parent) && path8.name === "object") {
+ if (n.MemberExpression.check(parent) && path10.name === "object") {
if (parent.object !== node) {
throw new Error("Nodes must be equal");
}
continue;
}
- if (n.ConditionalExpression.check(parent) && path8.name === "test") {
+ if (n.ConditionalExpression.check(parent) && path10.name === "test") {
if (parent.test !== node) {
throw new Error("Nodes must be equal");
}
continue;
}
- if (isBinary(parent) && path8.name === "left") {
+ if (isBinary(parent) && path10.name === "left") {
if (parent.left !== node) {
throw new Error("Nodes must be equal");
}
continue;
}
- if (n.UnaryExpression.check(parent) && !parent.prefix && path8.name === "argument") {
+ if (n.UnaryExpression.check(parent) && !parent.prefix && path10.name === "argument") {
if (parent.argument !== node) {
throw new Error("Nodes must be equal");
}
@@ -27614,36 +27693,36 @@ var require_path_visitor = __commonJS({
};
PVp.reset = function(_path) {
};
- PVp.visitWithoutReset = function(path8) {
+ PVp.visitWithoutReset = function(path10) {
if (this instanceof this.Context) {
- return this.visitor.visitWithoutReset(path8);
+ return this.visitor.visitWithoutReset(path10);
}
- if (!(path8 instanceof NodePath)) {
+ if (!(path10 instanceof NodePath)) {
throw new Error("");
}
- var value = path8.value;
+ var value = path10.value;
var methodName = value && typeof value === "object" && typeof value.type === "string" && this._methodNameTable[value.type];
if (methodName) {
- var context = this.acquireContext(path8);
+ var context = this.acquireContext(path10);
try {
return context.invokeVisitorMethod(methodName);
} finally {
this.releaseContext(context);
}
} else {
- return visitChildren(path8, this);
+ return visitChildren(path10, this);
}
};
- function visitChildren(path8, visitor) {
- if (!(path8 instanceof NodePath)) {
+ function visitChildren(path10, visitor) {
+ if (!(path10 instanceof NodePath)) {
throw new Error("");
}
if (!(visitor instanceof PathVisitor)) {
throw new Error("");
}
- var value = path8.value;
+ var value = path10.value;
if (isArray2.check(value)) {
- path8.each(visitor.visitWithoutReset, visitor);
+ path10.each(visitor.visitWithoutReset, visitor);
} else if (!isObject2.check(value)) {
} else {
var childNames = types.getFieldNames(value);
@@ -27657,19 +27736,19 @@ var require_path_visitor = __commonJS({
if (!hasOwn.call(value, childName)) {
value[childName] = types.getFieldValue(value, childName);
}
- childPaths.push(path8.get(childName));
+ childPaths.push(path10.get(childName));
}
for (var i = 0; i < childCount; ++i) {
visitor.visitWithoutReset(childPaths[i]);
}
}
- return path8.value;
+ return path10.value;
}
- PVp.acquireContext = function(path8) {
+ PVp.acquireContext = function(path10) {
if (this._reusableContextStack.length === 0) {
- return new this.Context(path8);
+ return new this.Context(path10);
}
- return this._reusableContextStack.pop().reset(path8);
+ return this._reusableContextStack.pop().reset(path10);
};
PVp.releaseContext = function(context) {
if (!(context instanceof this.Context)) {
@@ -27685,14 +27764,14 @@ var require_path_visitor = __commonJS({
return this._changeReported;
};
function makeContextConstructor(visitor) {
- function Context(path8) {
+ function Context(path10) {
if (!(this instanceof Context)) {
throw new Error("");
}
if (!(this instanceof PathVisitor)) {
throw new Error("");
}
- if (!(path8 instanceof NodePath)) {
+ if (!(path10 instanceof NodePath)) {
throw new Error("");
}
Object.defineProperty(this, "visitor", {
@@ -27701,7 +27780,7 @@ var require_path_visitor = __commonJS({
enumerable: true,
configurable: false
});
- this.currentPath = path8;
+ this.currentPath = path10;
this.needToCallTraverse = true;
Object.seal(this);
}
@@ -27714,14 +27793,14 @@ var require_path_visitor = __commonJS({
return Context;
}
var sharedContextProtoMethods = /* @__PURE__ */ Object.create(null);
- sharedContextProtoMethods.reset = function reset(path8) {
+ sharedContextProtoMethods.reset = function reset(path10) {
if (!(this instanceof this.Context)) {
throw new Error("");
}
- if (!(path8 instanceof NodePath)) {
+ if (!(path10 instanceof NodePath)) {
throw new Error("");
}
- this.currentPath = path8;
+ this.currentPath = path10;
this.needToCallTraverse = true;
return this;
};
@@ -27744,34 +27823,34 @@ var require_path_visitor = __commonJS({
if (this.needToCallTraverse !== false) {
throw new Error("Must either call this.traverse or return false in " + methodName);
}
- var path8 = this.currentPath;
- return path8 && path8.value;
+ var path10 = this.currentPath;
+ return path10 && path10.value;
};
- sharedContextProtoMethods.traverse = function traverse(path8, newVisitor) {
+ sharedContextProtoMethods.traverse = function traverse(path10, newVisitor) {
if (!(this instanceof this.Context)) {
throw new Error("");
}
- if (!(path8 instanceof NodePath)) {
+ if (!(path10 instanceof NodePath)) {
throw new Error("");
}
if (!(this.currentPath instanceof NodePath)) {
throw new Error("");
}
this.needToCallTraverse = false;
- return visitChildren(path8, PathVisitor.fromMethodsObject(newVisitor || this.visitor));
+ return visitChildren(path10, PathVisitor.fromMethodsObject(newVisitor || this.visitor));
};
- sharedContextProtoMethods.visit = function visit(path8, newVisitor) {
+ sharedContextProtoMethods.visit = function visit(path10, newVisitor) {
if (!(this instanceof this.Context)) {
throw new Error("");
}
- if (!(path8 instanceof NodePath)) {
+ if (!(path10 instanceof NodePath)) {
throw new Error("");
}
if (!(this.currentPath instanceof NodePath)) {
throw new Error("");
}
this.needToCallTraverse = false;
- return PathVisitor.fromMethodsObject(newVisitor || this.visitor).visitWithoutReset(path8);
+ return PathVisitor.fromMethodsObject(newVisitor || this.visitor).visitWithoutReset(path10);
};
sharedContextProtoMethods.reportChanged = function reportChanged() {
this.visitor.reportChanged();
@@ -28965,22 +29044,22 @@ var require_main = __commonJS({
}
});
-// .yarn/cache/vm2-patch-e4c2a87b9d-d283b74b74.zip/node_modules/vm2/index.js
+// vm2/index.js
var require_vm2 = __commonJS({
- ".yarn/cache/vm2-patch-e4c2a87b9d-d283b74b74.zip/node_modules/vm2/index.js"(exports, module2) {
+ "vm2/index.js"(exports, module2) {
"use strict";
module2.exports = {
- VM: function VM() {
+ VM() {
},
- VMScript: function VMScript() {
+ VMScript() {
}
};
}
});
-// .yarn/cache/degenerator-npm-4.0.2-40f6f904e6-99337dd354.zip/node_modules/degenerator/dist/index.js
+// .yarn/cache/degenerator-npm-4.0.4-1f9c4b67eb-105bd28952.zip/node_modules/degenerator/dist/index.js
var require_dist8 = __commonJS({
- ".yarn/cache/degenerator-npm-4.0.2-40f6f904e6-99337dd354.zip/node_modules/degenerator/dist/index.js"(exports) {
+ ".yarn/cache/degenerator-npm-4.0.4-1f9c4b67eb-105bd28952.zip/node_modules/degenerator/dist/index.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.compile = exports.degenerator = void 0;
@@ -28999,10 +29078,10 @@ var require_dist8 = __commonJS({
do {
lastNamesLength = names.length;
(0, ast_types_1.visit)(ast, {
- visitVariableDeclaration(path8) {
- if (path8.node.declarations) {
- for (let i = 0; i < path8.node.declarations.length; i++) {
- const declaration = path8.node.declarations[i];
+ visitVariableDeclaration(path10) {
+ if (path10.node.declarations) {
+ for (let i = 0; i < path10.node.declarations.length; i++) {
+ const declaration = path10.node.declarations[i];
if (ast_types_1.namedTypes.VariableDeclarator.check(declaration) && ast_types_1.namedTypes.Identifier.check(declaration.init) && ast_types_1.namedTypes.Identifier.check(declaration.id) && checkName(declaration.init.name, names) && !checkName(declaration.id.name, names)) {
names.push(declaration.id.name);
}
@@ -29010,18 +29089,18 @@ var require_dist8 = __commonJS({
}
return false;
},
- visitAssignmentExpression(path8) {
- if (ast_types_1.namedTypes.Identifier.check(path8.node.left) && ast_types_1.namedTypes.Identifier.check(path8.node.right) && checkName(path8.node.right.name, names) && !checkName(path8.node.left.name, names)) {
- names.push(path8.node.left.name);
+ visitAssignmentExpression(path10) {
+ if (ast_types_1.namedTypes.Identifier.check(path10.node.left) && ast_types_1.namedTypes.Identifier.check(path10.node.right) && checkName(path10.node.right.name, names) && !checkName(path10.node.left.name, names)) {
+ names.push(path10.node.left.name);
}
return false;
},
- visitFunction(path8) {
- if (path8.node.id) {
+ visitFunction(path10) {
+ if (path10.node.id) {
let shouldDegenerate = false;
- (0, ast_types_1.visit)(path8.node, {
- visitCallExpression(path9) {
- if (checkNames(path9.node, names)) {
+ (0, ast_types_1.visit)(path10.node, {
+ visitCallExpression(path11) {
+ if (checkNames(path11.node, names)) {
shouldDegenerate = true;
}
return false;
@@ -29030,28 +29109,28 @@ var require_dist8 = __commonJS({
if (!shouldDegenerate) {
return false;
}
- path8.node.async = true;
- if (!checkName(path8.node.id.name, names)) {
- names.push(path8.node.id.name);
+ path10.node.async = true;
+ if (!checkName(path10.node.id.name, names)) {
+ names.push(path10.node.id.name);
}
}
- this.traverse(path8);
+ this.traverse(path10);
}
});
} while (lastNamesLength !== names.length);
(0, ast_types_1.visit)(ast, {
- visitCallExpression(path8) {
- if (checkNames(path8.node, names)) {
+ visitCallExpression(path10) {
+ if (checkNames(path10.node, names)) {
const delegate = false;
- const { name, parent: { node: pNode } } = path8;
- const expr = ast_types_1.builders.awaitExpression(path8.node, delegate);
+ const { name, parent: { node: pNode } } = path10;
+ const expr = ast_types_1.builders.awaitExpression(path10.node, delegate);
if (ast_types_1.namedTypes.CallExpression.check(pNode)) {
pNode.arguments[name] = expr;
} else {
pNode[name] = expr;
}
}
- this.traverse(path8);
+ this.traverse(path10);
}
});
return (0, escodegen_1.generate)(ast);
@@ -29122,9 +29201,9 @@ var require_dist8 = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dateRange.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dateRange.js
var require_dateRange = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dateRange.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dateRange.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function dateRange() {
@@ -29134,9 +29213,9 @@ var require_dateRange = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dnsDomainIs.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dnsDomainIs.js
var require_dnsDomainIs = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dnsDomainIs.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dnsDomainIs.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function dnsDomainIs(host, domain) {
@@ -29148,9 +29227,9 @@ var require_dnsDomainIs = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dnsDomainLevels.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dnsDomainLevels.js
var require_dnsDomainLevels = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dnsDomainLevels.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dnsDomainLevels.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function dnsDomainLevels(host) {
@@ -29165,9 +29244,9 @@ var require_dnsDomainLevels = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/util.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/util.js
var require_util3 = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/util.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/util.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.isGMT = exports.dnsLookup = void 0;
@@ -29191,9 +29270,9 @@ var require_util3 = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dnsResolve.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dnsResolve.js
var require_dnsResolve = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/dnsResolve.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/dnsResolve.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var util_1 = require_util3();
@@ -29407,9 +29486,9 @@ var require_netmask = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/isInNet.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/isInNet.js
var require_isInNet = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/isInNet.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/isInNet.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var netmask_1 = require_netmask();
@@ -29430,9 +29509,9 @@ var require_isInNet = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/isPlainHostName.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/isPlainHostName.js
var require_isPlainHostName = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/isPlainHostName.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/isPlainHostName.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function isPlainHostName(host) {
@@ -29442,9 +29521,9 @@ var require_isPlainHostName = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/isResolvable.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/isResolvable.js
var require_isResolvable = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/isResolvable.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/isResolvable.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var util_1 = require_util3();
@@ -29462,9 +29541,9 @@ var require_isResolvable = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/localHostOrDomainIs.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/localHostOrDomainIs.js
var require_localHostOrDomainIs = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/localHostOrDomainIs.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/localHostOrDomainIs.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function localHostOrDomainIs(host, hostdom) {
@@ -29488,7 +29567,7 @@ var require_ip2 = __commonJS({
".yarn/cache/ip-npm-1.1.8-abea558b72-bb1850e7b6.zip/node_modules/ip/lib/ip.js"(exports) {
var ip = exports;
var { Buffer: Buffer2 } = require("buffer");
- var os2 = require("os");
+ var os3 = require("os");
ip.toBuffer = function(ip2, buff, offset) {
offset = ~~offset;
var result;
@@ -29748,7 +29827,7 @@ var require_ip2 = __commonJS({
return family === "ipv4" ? "127.0.0.1" : "fe80::1";
};
ip.address = function(name, family) {
- var interfaces = os2.networkInterfaces();
+ var interfaces = os3.networkInterfaces();
family = _normalizeFamily(family);
if (name && name !== "private" && name !== "public") {
var res = interfaces[name].filter((details) => {
@@ -29789,9 +29868,9 @@ var require_ip2 = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/myIpAddress.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/myIpAddress.js
var require_myIpAddress = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/myIpAddress.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/myIpAddress.js"(exports) {
"use strict";
var __importDefault2 = exports && exports.__importDefault || function(mod) {
return mod && mod.__esModule ? mod : { "default": mod };
@@ -29824,9 +29903,9 @@ var require_myIpAddress = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/shExpMatch.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/shExpMatch.js
var require_shExpMatch = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/shExpMatch.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/shExpMatch.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function shExpMatch(str, shexp) {
@@ -29841,9 +29920,9 @@ var require_shExpMatch = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/timeRange.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/timeRange.js
var require_timeRange = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/timeRange.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/timeRange.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function timeRange() {
@@ -29888,9 +29967,9 @@ var require_timeRange = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/weekdayRange.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/weekdayRange.js
var require_weekdayRange = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/weekdayRange.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/weekdayRange.js"(exports) {
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var util_1 = require_util3();
@@ -29936,9 +30015,9 @@ var require_weekdayRange = __commonJS({
}
});
-// .yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/index.js
+// .yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/index.js
var require_dist9 = __commonJS({
- ".yarn/cache/pac-resolver-npm-6.0.1-a28b1bcfbc-cbe90f8f12.zip/node_modules/pac-resolver/dist/index.js"(exports) {
+ ".yarn/cache/pac-resolver-npm-6.0.2-9e9298321a-e77d61b35d.zip/node_modules/pac-resolver/dist/index.js"(exports) {
"use strict";
var __importDefault2 = exports && exports.__importDefault || function(mod) {
return mod && mod.__esModule ? mod : { "default": mod };
@@ -30013,9 +30092,9 @@ var require_dist9 = __commonJS({
}
});
-// .yarn/cache/pac-proxy-agent-npm-6.0.2-b8b621b262-0b263da7a6.zip/node_modules/pac-proxy-agent/dist/index.js
+// .yarn/cache/pac-proxy-agent-npm-6.0.4-fab524db53-26e60ad16f.zip/node_modules/pac-proxy-agent/dist/index.js
var require_dist10 = __commonJS({
- ".yarn/cache/pac-proxy-agent-npm-6.0.2-b8b621b262-0b263da7a6.zip/node_modules/pac-proxy-agent/dist/index.js"(exports) {
+ ".yarn/cache/pac-proxy-agent-npm-6.0.4-fab524db53-26e60ad16f.zip/node_modules/pac-proxy-agent/dist/index.js"(exports) {
"use strict";
var __createBinding2 = exports && exports.__createBinding || (Object.create ? function(o, m, k, k2) {
if (k2 === void 0)
@@ -30139,17 +30218,17 @@ var require_dist10 = __commonJS({
const { secureEndpoint } = opts;
const resolver = await this.getResolver();
const defaultPort = secureEndpoint ? 443 : 80;
- let path8 = req.path;
+ let path10 = req.path;
let search = null;
- const firstQuestion = path8.indexOf("?");
+ const firstQuestion = path10.indexOf("?");
if (firstQuestion !== -1) {
- search = path8.substring(firstQuestion);
- path8 = path8.substring(0, firstQuestion);
+ search = path10.substring(firstQuestion);
+ path10 = path10.substring(0, firstQuestion);
}
const urlOpts = {
...opts,
protocol: secureEndpoint ? "https:" : "http:",
- pathname: path8,
+ pathname: path10,
search,
// need to use `hostname` instead of `host` otherwise `port` is ignored
hostname: opts.host,
@@ -30219,19 +30298,19 @@ var require_dist10 = __commonJS({
}
};
PacProxyAgent.protocols = [
- "pac-data",
- "pac-file",
- "pac-ftp",
- "pac-http",
- "pac-https"
+ "pac+data",
+ "pac+file",
+ "pac+ftp",
+ "pac+http",
+ "pac+https"
];
exports.PacProxyAgent = PacProxyAgent;
}
});
-// .yarn/cache/proxy-agent-npm-6.2.0-ad375074b5-bd8415b36a.zip/node_modules/proxy-agent/dist/index.js
+// .yarn/cache/proxy-agent-npm-6.2.2-678133ab7b-481d168121.zip/node_modules/proxy-agent/dist/index.js
var require_dist11 = __commonJS({
- ".yarn/cache/proxy-agent-npm-6.2.0-ad375074b5-bd8415b36a.zip/node_modules/proxy-agent/dist/index.js"(exports) {
+ ".yarn/cache/proxy-agent-npm-6.2.2-678133ab7b-481d168121.zip/node_modules/proxy-agent/dist/index.js"(exports) {
"use strict";
var __createBinding2 = exports && exports.__createBinding || (Object.create ? function(o, m, k, k2) {
if (k2 === void 0)
@@ -30294,11 +30373,11 @@ var require_dist11 = __commonJS({
socks4a: [socks_proxy_agent_1.SocksProxyAgent, socks_proxy_agent_1.SocksProxyAgent],
socks5: [socks_proxy_agent_1.SocksProxyAgent, socks_proxy_agent_1.SocksProxyAgent],
socks5h: [socks_proxy_agent_1.SocksProxyAgent, socks_proxy_agent_1.SocksProxyAgent],
- "pac-data": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
- "pac-file": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
- "pac-ftp": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
- "pac-http": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
- "pac-https": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent]
+ "pac+data": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
+ "pac+file": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
+ "pac+ftp": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
+ "pac+http": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent],
+ "pac+https": [pac_proxy_agent_1.PacProxyAgent, pac_proxy_agent_1.PacProxyAgent]
};
function isValidProtocol(v) {
return PROTOCOLS.includes(v);
@@ -30315,7 +30394,8 @@ var require_dist11 = __commonJS({
}
async connect(req, opts) {
const { secureEndpoint } = opts;
- const protocol = secureEndpoint ? "https:" : "http:";
+ const isWebSocket = req.getHeader("upgrade") === "websocket";
+ const protocol = secureEndpoint ? isWebSocket ? "wss:" : "https:" : isWebSocket ? "ws:" : "http:";
const host = req.getHeader("host");
const url = new URL(req.path, `${protocol}//${host}`).href;
const proxy = this.getProxyForUrl(url);
@@ -30333,7 +30413,7 @@ var require_dist11 = __commonJS({
if (!isValidProtocol(proxyProto)) {
throw new Error(`Unsupported protocol for proxy URL: ${proxy}`);
}
- const ctor = exports.proxies[proxyProto][secureEndpoint ? 1 : 0];
+ const ctor = exports.proxies[proxyProto][secureEndpoint || isWebSocket ? 1 : 0];
agent = new ctor(proxy, this.connectOpts);
this.cache.set(cacheKey, agent);
} else {
@@ -32246,10 +32326,10 @@ var require_header = __commonJS({
}
const prefixSize = this.ctime || this.atime ? 130 : 155;
const split = splitPrefix(this.path || "", prefixSize);
- const path8 = split[0];
+ const path10 = split[0];
const prefix = split[1];
this.needPax = split[2];
- this.needPax = encString(buf, off, 100, path8) || this.needPax;
+ this.needPax = encString(buf, off, 100, path10) || this.needPax;
this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax;
this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax;
this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax;
@@ -32359,7 +32439,7 @@ var require_pax = __commonJS({
".yarn/cache/tar-npm-6.1.15-44c3e71720-815c25f881.zip/node_modules/tar/lib/pax.js"(exports, module2) {
"use strict";
var Header = require_header();
- var path8 = require("path");
+ var path10 = require("path");
var Pax = class {
constructor(obj, global2) {
this.atime = obj.atime || null;
@@ -32394,7 +32474,7 @@ var require_pax = __commonJS({
// XXX split the path
// then the path should be PaxHeader + basename, but less than 99,
// prepend with the dirname
- path: ("PaxHeader/" + path8.basename(this.path)).slice(0, 99),
+ path: ("PaxHeader/" + path10.basename(this.path)).slice(0, 99),
mode: this.mode || 420,
uid: this.uid || null,
gid: this.gid || null,
@@ -32525,16 +32605,16 @@ var require_winchars = __commonJS({
var require_strip_absolute_path = __commonJS({
".yarn/cache/tar-npm-6.1.15-44c3e71720-815c25f881.zip/node_modules/tar/lib/strip-absolute-path.js"(exports, module2) {
var { isAbsolute, parse } = require("path").win32;
- module2.exports = (path8) => {
+ module2.exports = (path10) => {
let r = "";
- let parsed = parse(path8);
- while (isAbsolute(path8) || parsed.root) {
- const root = path8.charAt(0) === "/" && path8.slice(0, 4) !== "//?/" ? "/" : parsed.root;
- path8 = path8.slice(root.length);
+ let parsed = parse(path10);
+ while (isAbsolute(path10) || parsed.root) {
+ const root = path10.charAt(0) === "/" && path10.slice(0, 4) !== "//?/" ? "/" : parsed.root;
+ path10 = path10.slice(root.length);
r += root;
- parsed = parse(path8);
+ parsed = parse(path10);
}
- return [r, path8];
+ return [r, path10];
};
}
});
@@ -32571,16 +32651,16 @@ var require_write_entry = __commonJS({
var { Minipass } = require_minipass();
var Pax = require_pax();
var Header = require_header();
- var fs6 = require("fs");
- var path8 = require("path");
+ var fs8 = require("fs");
+ var path10 = require("path");
var normPath = require_normalize_windows_path();
var stripSlash = require_strip_trailing_slashes();
- var prefixPath = (path9, prefix) => {
+ var prefixPath = (path11, prefix) => {
if (!prefix) {
- return normPath(path9);
+ return normPath(path11);
}
- path9 = normPath(path9).replace(/^\.(\/|$)/, "");
- return stripSlash(prefix) + "/" + path9;
+ path11 = normPath(path11).replace(/^\.(\/|$)/, "");
+ return stripSlash(prefix) + "/" + path11;
};
var maxReadSize = 16 * 1024 * 1024;
var PROCESS = Symbol("process");
@@ -32651,7 +32731,7 @@ var require_write_entry = __commonJS({
this.path = winchars.decode(this.path.replace(/\\/g, "/"));
p = p.replace(/\\/g, "/");
}
- this.absolute = normPath(opt.absolute || path8.resolve(this.cwd, p));
+ this.absolute = normPath(opt.absolute || path10.resolve(this.cwd, p));
if (this.path === "") {
this.path = "./";
}
@@ -32674,7 +32754,7 @@ var require_write_entry = __commonJS({
return super.emit(ev, ...data);
}
[LSTAT]() {
- fs6.lstat(this.absolute, (er, stat) => {
+ fs8.lstat(this.absolute, (er, stat) => {
if (er) {
return this.emit("error", er);
}
@@ -32706,8 +32786,8 @@ var require_write_entry = __commonJS({
[MODE](mode) {
return modeFix(mode, this.type === "Directory", this.portable);
}
- [PREFIX](path9) {
- return prefixPath(path9, this.prefix);
+ [PREFIX](path11) {
+ return prefixPath(path11, this.prefix);
}
[HEADER]() {
if (this.type === "Directory" && this.portable) {
@@ -32756,7 +32836,7 @@ var require_write_entry = __commonJS({
this.end();
}
[SYMLINK]() {
- fs6.readlink(this.absolute, (er, linkpath) => {
+ fs8.readlink(this.absolute, (er, linkpath) => {
if (er) {
return this.emit("error", er);
}
@@ -32770,7 +32850,7 @@ var require_write_entry = __commonJS({
}
[HARDLINK](linkpath) {
this.type = "Link";
- this.linkpath = normPath(path8.relative(this.cwd, linkpath));
+ this.linkpath = normPath(path10.relative(this.cwd, linkpath));
this.stat.size = 0;
this[HEADER]();
this.end();
@@ -32793,7 +32873,7 @@ var require_write_entry = __commonJS({
this[OPENFILE]();
}
[OPENFILE]() {
- fs6.open(this.absolute, "r", (er, fd) => {
+ fs8.open(this.absolute, "r", (er, fd) => {
if (er) {
return this.emit("error", er);
}
@@ -32817,7 +32897,7 @@ var require_write_entry = __commonJS({
}
[READ]() {
const { fd, buf, offset, length, pos } = this;
- fs6.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+ fs8.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er) {
return this[CLOSE](() => this.emit("error", er));
}
@@ -32825,7 +32905,7 @@ var require_write_entry = __commonJS({
});
}
[CLOSE](cb) {
- fs6.close(this.fd, cb);
+ fs8.close(this.fd, cb);
}
[ONREAD](bytesRead) {
if (bytesRead <= 0 && this.remain > 0) {
@@ -32889,19 +32969,19 @@ var require_write_entry = __commonJS({
});
var WriteEntrySync = class extends WriteEntry {
[LSTAT]() {
- this[ONLSTAT](fs6.lstatSync(this.absolute));
+ this[ONLSTAT](fs8.lstatSync(this.absolute));
}
[SYMLINK]() {
- this[ONREADLINK](fs6.readlinkSync(this.absolute));
+ this[ONREADLINK](fs8.readlinkSync(this.absolute));
}
[OPENFILE]() {
- this[ONOPENFILE](fs6.openSync(this.absolute, "r"));
+ this[ONOPENFILE](fs8.openSync(this.absolute, "r"));
}
[READ]() {
let threw = true;
try {
const { fd, buf, offset, length, pos } = this;
- const bytesRead = fs6.readSync(fd, buf, offset, length, pos);
+ const bytesRead = fs8.readSync(fd, buf, offset, length, pos);
this[ONREAD](bytesRead);
threw = false;
} finally {
@@ -32918,7 +32998,7 @@ var require_write_entry = __commonJS({
cb();
}
[CLOSE](cb) {
- fs6.closeSync(this.fd);
+ fs8.closeSync(this.fd);
cb();
}
};
@@ -33001,8 +33081,8 @@ var require_write_entry = __commonJS({
super.write(this.header.block);
readEntry.pipe(this);
}
- [PREFIX](path9) {
- return prefixPath(path9, this.prefix);
+ [PREFIX](path11) {
+ return prefixPath(path11, this.prefix);
}
[MODE](mode) {
return modeFix(mode, this.type === "Directory", this.portable);
@@ -33034,8 +33114,8 @@ var require_pack = __commonJS({
".yarn/cache/tar-npm-6.1.15-44c3e71720-815c25f881.zip/node_modules/tar/lib/pack.js"(exports, module2) {
"use strict";
var PackJob = class {
- constructor(path9, absolute) {
- this.path = path9 || "./";
+ constructor(path11, absolute) {
+ this.path = path11 || "./";
this.absolute = absolute;
this.entry = null;
this.stat = null;
@@ -33073,8 +33153,8 @@ var require_pack = __commonJS({
var WRITEENTRYCLASS = Symbol("writeEntryClass");
var WRITE = Symbol("write");
var ONDRAIN = Symbol("ondrain");
- var fs6 = require("fs");
- var path8 = require("path");
+ var fs8 = require("fs");
+ var path10 = require("path");
var warner = require_warn_mixin();
var normPath = require_normalize_windows_path();
var Pack = warner(class Pack extends Minipass {
@@ -33127,31 +33207,31 @@ var require_pack = __commonJS({
[WRITE](chunk) {
return super.write(chunk);
}
- add(path9) {
- this.write(path9);
+ add(path11) {
+ this.write(path11);
return this;
}
- end(path9) {
- if (path9) {
- this.write(path9);
+ end(path11) {
+ if (path11) {
+ this.write(path11);
}
this[ENDED] = true;
this[PROCESS]();
return this;
}
- write(path9) {
+ write(path11) {
if (this[ENDED]) {
throw new Error("write after end");
}
- if (path9 instanceof ReadEntry) {
- this[ADDTARENTRY](path9);
+ if (path11 instanceof ReadEntry) {
+ this[ADDTARENTRY](path11);
} else {
- this[ADDFSENTRY](path9);
+ this[ADDFSENTRY](path11);
}
return this.flowing;
}
[ADDTARENTRY](p) {
- const absolute = normPath(path8.resolve(this.cwd, p.path));
+ const absolute = normPath(path10.resolve(this.cwd, p.path));
if (!this.filter(p.path, p)) {
p.resume();
} else {
@@ -33164,7 +33244,7 @@ var require_pack = __commonJS({
this[PROCESS]();
}
[ADDFSENTRY](p) {
- const absolute = normPath(path8.resolve(this.cwd, p));
+ const absolute = normPath(path10.resolve(this.cwd, p));
this[QUEUE].push(new PackJob(p, absolute));
this[PROCESS]();
}
@@ -33172,7 +33252,7 @@ var require_pack = __commonJS({
job.pending = true;
this[JOBS] += 1;
const stat = this.follow ? "stat" : "lstat";
- fs6[stat](job.absolute, (er, stat2) => {
+ fs8[stat](job.absolute, (er, stat2) => {
job.pending = false;
this[JOBS] -= 1;
if (er) {
@@ -33193,7 +33273,7 @@ var require_pack = __commonJS({
[READDIR](job) {
job.pending = true;
this[JOBS] += 1;
- fs6.readdir(job.absolute, (er, entries) => {
+ fs8.readdir(job.absolute, (er, entries) => {
job.pending = false;
this[JOBS] -= 1;
if (er) {
@@ -33355,10 +33435,10 @@ var require_pack = __commonJS({
}
[STAT](job) {
const stat = this.follow ? "statSync" : "lstatSync";
- this[ONSTAT](job, fs6[stat](job.absolute));
+ this[ONSTAT](job, fs8[stat](job.absolute));
}
[READDIR](job, stat) {
- this[ONREADDIR](job, fs6.readdirSync(job.absolute));
+ this[ONREADDIR](job, fs8.readdirSync(job.absolute));
}
// gotta get it all in this tick
[PIPE](job) {
@@ -33393,8 +33473,8 @@ var require_fs_minipass = __commonJS({
"use strict";
var MiniPass = require_minipass2();
var EE = require("events").EventEmitter;
- var fs6 = require("fs");
- var writev = fs6.writev;
+ var fs8 = require("fs");
+ var writev = fs8.writev;
if (!writev) {
const binding = process.binding("fs");
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback;
@@ -33434,16 +33514,16 @@ var require_fs_minipass = __commonJS({
var _defaultFlag = Symbol("_defaultFlag");
var _errored = Symbol("_errored");
var ReadStream = class extends MiniPass {
- constructor(path8, opt) {
+ constructor(path10, opt) {
opt = opt || {};
super(opt);
this.readable = true;
this.writable = false;
- if (typeof path8 !== "string")
+ if (typeof path10 !== "string")
throw new TypeError("path must be a string");
this[_errored] = false;
this[_fd] = typeof opt.fd === "number" ? opt.fd : null;
- this[_path] = path8;
+ this[_path] = path10;
this[_readSize] = opt.readSize || 16 * 1024 * 1024;
this[_reading] = false;
this[_size] = typeof opt.size === "number" ? opt.size : Infinity;
@@ -33467,7 +33547,7 @@ var require_fs_minipass = __commonJS({
throw new TypeError("this is a readable stream");
}
[_open]() {
- fs6.open(this[_path], "r", (er, fd) => this[_onopen](er, fd));
+ fs8.open(this[_path], "r", (er, fd) => this[_onopen](er, fd));
}
[_onopen](er, fd) {
if (er)
@@ -33487,7 +33567,7 @@ var require_fs_minipass = __commonJS({
const buf = this[_makeBuf]();
if (buf.length === 0)
return process.nextTick(() => this[_onread](null, 0, buf));
- fs6.read(this[_fd], buf, 0, buf.length, null, (er, br, buf2) => this[_onread](er, br, buf2));
+ fs8.read(this[_fd], buf, 0, buf.length, null, (er, br, buf2) => this[_onread](er, br, buf2));
}
}
[_onread](er, br, buf) {
@@ -33501,7 +33581,7 @@ var require_fs_minipass = __commonJS({
if (this[_autoClose] && typeof this[_fd] === "number") {
const fd = this[_fd];
this[_fd] = null;
- fs6.close(fd, (er) => er ? this.emit("error", er) : this.emit("close"));
+ fs8.close(fd, (er) => er ? this.emit("error", er) : this.emit("close"));
}
}
[_onerror](er) {
@@ -33544,7 +33624,7 @@ var require_fs_minipass = __commonJS({
[_open]() {
let threw = true;
try {
- this[_onopen](null, fs6.openSync(this[_path], "r"));
+ this[_onopen](null, fs8.openSync(this[_path], "r"));
threw = false;
} finally {
if (threw)
@@ -33558,7 +33638,7 @@ var require_fs_minipass = __commonJS({
this[_reading] = true;
do {
const buf = this[_makeBuf]();
- const br = buf.length === 0 ? 0 : fs6.readSync(this[_fd], buf, 0, buf.length, null);
+ const br = buf.length === 0 ? 0 : fs8.readSync(this[_fd], buf, 0, buf.length, null);
if (!this[_handleChunk](br, buf))
break;
} while (true);
@@ -33574,13 +33654,13 @@ var require_fs_minipass = __commonJS({
if (this[_autoClose] && typeof this[_fd] === "number") {
const fd = this[_fd];
this[_fd] = null;
- fs6.closeSync(fd);
+ fs8.closeSync(fd);
this.emit("close");
}
}
};
var WriteStream = class extends EE {
- constructor(path8, opt) {
+ constructor(path10, opt) {
opt = opt || {};
super(opt);
this.readable = false;
@@ -33590,7 +33670,7 @@ var require_fs_minipass = __commonJS({
this[_ended] = false;
this[_needDrain] = false;
this[_queue] = [];
- this[_path] = path8;
+ this[_path] = path10;
this[_fd] = typeof opt.fd === "number" ? opt.fd : null;
this[_mode] = opt.mode === void 0 ? 438 : opt.mode;
this[_pos] = typeof opt.start === "number" ? opt.start : null;
@@ -33621,7 +33701,7 @@ var require_fs_minipass = __commonJS({
this.emit("error", er);
}
[_open]() {
- fs6.open(
+ fs8.open(
this[_path],
this[_flags],
this[_mode],
@@ -33665,7 +33745,7 @@ var require_fs_minipass = __commonJS({
return true;
}
[_write](buf) {
- fs6.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw));
+ fs8.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw));
}
[_onwrite](er, bw) {
if (er)
@@ -33709,7 +33789,7 @@ var require_fs_minipass = __commonJS({
if (this[_autoClose] && typeof this[_fd] === "number") {
const fd = this[_fd];
this[_fd] = null;
- fs6.close(fd, (er) => er ? this.emit("error", er) : this.emit("close"));
+ fs8.close(fd, (er) => er ? this.emit("error", er) : this.emit("close"));
}
}
};
@@ -33718,7 +33798,7 @@ var require_fs_minipass = __commonJS({
let fd;
if (this[_defaultFlag] && this[_flags] === "r+") {
try {
- fd = fs6.openSync(this[_path], this[_flags], this[_mode]);
+ fd = fs8.openSync(this[_path], this[_flags], this[_mode]);
} catch (er) {
if (er.code === "ENOENT") {
this[_flags] = "w";
@@ -33727,14 +33807,14 @@ var require_fs_minipass = __commonJS({
throw er;
}
} else
- fd = fs6.openSync(this[_path], this[_flags], this[_mode]);
+ fd = fs8.openSync(this[_path], this[_flags], this[_mode]);
this[_onopen](null, fd);
}
[_close]() {
if (this[_autoClose] && typeof this[_fd] === "number") {
const fd = this[_fd];
this[_fd] = null;
- fs6.closeSync(fd);
+ fs8.closeSync(fd);
this.emit("close");
}
}
@@ -33743,7 +33823,7 @@ var require_fs_minipass = __commonJS({
try {
this[_onwrite](
null,
- fs6.writeSync(this[_fd], buf, 0, buf.length, this[_pos])
+ fs8.writeSync(this[_fd], buf, 0, buf.length, this[_pos])
);
threw = false;
} finally {
@@ -34169,9 +34249,9 @@ var require_list = __commonJS({
"use strict";
var hlo = require_high_level_opt();
var Parser = require_parse2();
- var fs6 = require("fs");
+ var fs8 = require("fs");
var fsm = require_fs_minipass();
- var path8 = require("path");
+ var path10 = require("path");
var stripSlash = require_strip_trailing_slashes();
module2.exports = (opt_, files, cb) => {
if (typeof opt_ === "function") {
@@ -34213,8 +34293,8 @@ var require_list = __commonJS({
const map = new Map(files.map((f) => [stripSlash(f), true]));
const filter = opt.filter;
const mapHas = (file, r) => {
- const root = r || path8.parse(file).root || ".";
- const ret = file === root ? false : map.has(file) ? map.get(file) : mapHas(path8.dirname(file), root);
+ const root = r || path10.parse(file).root || ".";
+ const ret = file === root ? false : map.has(file) ? map.get(file) : mapHas(path10.dirname(file), root);
map.set(file, ret);
return ret;
};
@@ -34226,16 +34306,16 @@ var require_list = __commonJS({
let threw = true;
let fd;
try {
- const stat = fs6.statSync(file);
+ const stat = fs8.statSync(file);
const readSize = opt.maxReadSize || 16 * 1024 * 1024;
if (stat.size < readSize) {
- p.end(fs6.readFileSync(file));
+ p.end(fs8.readFileSync(file));
} else {
let pos = 0;
const buf = Buffer.allocUnsafe(readSize);
- fd = fs6.openSync(file, "r");
+ fd = fs8.openSync(file, "r");
while (pos < stat.size) {
- const bytesRead = fs6.readSync(fd, buf, 0, readSize, pos);
+ const bytesRead = fs8.readSync(fd, buf, 0, readSize, pos);
pos += bytesRead;
p.write(buf.slice(0, bytesRead));
}
@@ -34245,7 +34325,7 @@ var require_list = __commonJS({
} finally {
if (threw && fd) {
try {
- fs6.closeSync(fd);
+ fs8.closeSync(fd);
} catch (er) {
}
}
@@ -34258,7 +34338,7 @@ var require_list = __commonJS({
const p = new Promise((resolve, reject) => {
parse.on("error", reject);
parse.on("end", resolve);
- fs6.stat(file, (er, stat) => {
+ fs8.stat(file, (er, stat) => {
if (er) {
reject(er);
} else {
@@ -34285,7 +34365,7 @@ var require_create = __commonJS({
var Pack = require_pack();
var fsm = require_fs_minipass();
var t = require_list();
- var path8 = require("path");
+ var path10 = require("path");
module2.exports = (opt_, files, cb) => {
if (typeof files === "function") {
cb = files;
@@ -34332,7 +34412,7 @@ var require_create = __commonJS({
files.forEach((file) => {
if (file.charAt(0) === "@") {
t({
- file: path8.resolve(p.cwd, file.slice(1)),
+ file: path10.resolve(p.cwd, file.slice(1)),
sync: true,
noResume: true,
onentry: (entry) => p.add(entry)
@@ -34348,7 +34428,7 @@ var require_create = __commonJS({
const file = files.shift();
if (file.charAt(0) === "@") {
return t({
- file: path8.resolve(p.cwd, file.slice(1)),
+ file: path10.resolve(p.cwd, file.slice(1)),
noResume: true,
onentry: (entry) => p.add(entry)
}).then((_) => addFilesAsync(p, files));
@@ -34377,10 +34457,10 @@ var require_replace = __commonJS({
"use strict";
var hlo = require_high_level_opt();
var Pack = require_pack();
- var fs6 = require("fs");
+ var fs8 = require("fs");
var fsm = require_fs_minipass();
var t = require_list();
- var path8 = require("path");
+ var path10 = require("path");
var Header = require_header();
module2.exports = (opt_, files, cb) => {
const opt = hlo(opt_);
@@ -34403,20 +34483,20 @@ var require_replace = __commonJS({
let position;
try {
try {
- fd = fs6.openSync(opt.file, "r+");
+ fd = fs8.openSync(opt.file, "r+");
} catch (er) {
if (er.code === "ENOENT") {
- fd = fs6.openSync(opt.file, "w+");
+ fd = fs8.openSync(opt.file, "w+");
} else {
throw er;
}
}
- const st = fs6.fstatSync(fd);
+ const st = fs8.fstatSync(fd);
const headBuf = Buffer.alloc(512);
POSITION:
for (position = 0; position < st.size; position += 512) {
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
- bytes = fs6.readSync(
+ bytes = fs8.readSync(
fd,
headBuf,
bufPos,
@@ -34448,7 +34528,7 @@ var require_replace = __commonJS({
} finally {
if (threw) {
try {
- fs6.closeSync(fd);
+ fs8.closeSync(fd);
} catch (er) {
}
}
@@ -34468,7 +34548,7 @@ var require_replace = __commonJS({
const getPos = (fd, size, cb_) => {
const cb2 = (er, pos) => {
if (er) {
- fs6.close(fd, (_) => cb_(er));
+ fs8.close(fd, (_) => cb_(er));
} else {
cb_(null, pos);
}
@@ -34485,7 +34565,7 @@ var require_replace = __commonJS({
}
bufPos += bytes;
if (bufPos < 512 && bytes) {
- return fs6.read(
+ return fs8.read(
fd,
headBuf,
bufPos,
@@ -34516,9 +34596,9 @@ var require_replace = __commonJS({
opt.mtimeCache.set(h.path, h.mtime);
}
bufPos = 0;
- fs6.read(fd, headBuf, 0, 512, position, onread);
+ fs8.read(fd, headBuf, 0, 512, position, onread);
};
- fs6.read(fd, headBuf, 0, 512, position, onread);
+ fs8.read(fd, headBuf, 0, 512, position, onread);
};
const promise = new Promise((resolve, reject) => {
p.on("error", reject);
@@ -34526,14 +34606,14 @@ var require_replace = __commonJS({
const onopen = (er, fd) => {
if (er && er.code === "ENOENT" && flag === "r+") {
flag = "w+";
- return fs6.open(opt.file, flag, onopen);
+ return fs8.open(opt.file, flag, onopen);
}
if (er) {
return reject(er);
}
- fs6.fstat(fd, (er2, st) => {
+ fs8.fstat(fd, (er2, st) => {
if (er2) {
- return fs6.close(fd, () => reject(er2));
+ return fs8.close(fd, () => reject(er2));
}
getPos(fd, st.size, (er3, position) => {
if (er3) {
@@ -34550,7 +34630,7 @@ var require_replace = __commonJS({
});
});
};
- fs6.open(opt.file, flag, onopen);
+ fs8.open(opt.file, flag, onopen);
});
return cb ? promise.then(cb, cb) : promise;
};
@@ -34558,7 +34638,7 @@ var require_replace = __commonJS({
files.forEach((file) => {
if (file.charAt(0) === "@") {
t({
- file: path8.resolve(p.cwd, file.slice(1)),
+ file: path10.resolve(p.cwd, file.slice(1)),
sync: true,
noResume: true,
onentry: (entry) => p.add(entry)
@@ -34574,7 +34654,7 @@ var require_replace = __commonJS({
const file = files.shift();
if (file.charAt(0) === "@") {
return t({
- file: path8.resolve(p.cwd, file.slice(1)),
+ file: path10.resolve(p.cwd, file.slice(1)),
noResume: true,
onentry: (entry) => p.add(entry)
}).then((_) => addFilesAsync(p, files));
@@ -34613,7 +34693,7 @@ var require_update = __commonJS({
if (!opt.mtimeCache) {
opt.mtimeCache = /* @__PURE__ */ new Map();
}
- opt.filter = filter ? (path8, stat) => filter(path8, stat) && !(opt.mtimeCache.get(path8) > stat.mtime) : (path8, stat) => !(opt.mtimeCache.get(path8) > stat.mtime);
+ opt.filter = filter ? (path10, stat) => filter(path10, stat) && !(opt.mtimeCache.get(path10) > stat.mtime) : (path10, stat) => !(opt.mtimeCache.get(path10) > stat.mtime);
};
}
});
@@ -34622,24 +34702,24 @@ var require_update = __commonJS({
var require_opts_arg = __commonJS({
".yarn/cache/mkdirp-npm-1.0.4-37f6ef56b9-1233611198.zip/node_modules/mkdirp/lib/opts-arg.js"(exports, module2) {
var { promisify } = require("util");
- var fs6 = require("fs");
+ var fs8 = require("fs");
var optsArg = (opts) => {
if (!opts)
- opts = { mode: 511, fs: fs6 };
+ opts = { mode: 511, fs: fs8 };
else if (typeof opts === "object")
- opts = { mode: 511, fs: fs6, ...opts };
+ opts = { mode: 511, fs: fs8, ...opts };
else if (typeof opts === "number")
- opts = { mode: opts, fs: fs6 };
+ opts = { mode: opts, fs: fs8 };
else if (typeof opts === "string")
- opts = { mode: parseInt(opts, 8), fs: fs6 };
+ opts = { mode: parseInt(opts, 8), fs: fs8 };
else
throw new TypeError("invalid options argument");
- opts.mkdir = opts.mkdir || opts.fs.mkdir || fs6.mkdir;
+ opts.mkdir = opts.mkdir || opts.fs.mkdir || fs8.mkdir;
opts.mkdirAsync = promisify(opts.mkdir);
- opts.stat = opts.stat || opts.fs.stat || fs6.stat;
+ opts.stat = opts.stat || opts.fs.stat || fs8.stat;
opts.statAsync = promisify(opts.stat);
- opts.statSync = opts.statSync || opts.fs.statSync || fs6.statSync;
- opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs6.mkdirSync;
+ opts.statSync = opts.statSync || opts.fs.statSync || fs8.statSync;
+ opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs8.mkdirSync;
return opts;
};
module2.exports = optsArg;
@@ -34651,28 +34731,28 @@ var require_path_arg = __commonJS({
".yarn/cache/mkdirp-npm-1.0.4-37f6ef56b9-1233611198.zip/node_modules/mkdirp/lib/path-arg.js"(exports, module2) {
var platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
var { resolve, parse } = require("path");
- var pathArg = (path8) => {
- if (/\0/.test(path8)) {
+ var pathArg = (path10) => {
+ if (/\0/.test(path10)) {
throw Object.assign(
new TypeError("path must be a string without null bytes"),
{
- path: path8,
+ path: path10,
code: "ERR_INVALID_ARG_VALUE"
}
);
}
- path8 = resolve(path8);
+ path10 = resolve(path10);
if (platform === "win32") {
const badWinChars = /[*|"<>?:]/;
- const { root } = parse(path8);
- if (badWinChars.test(path8.substr(root.length))) {
+ const { root } = parse(path10);
+ if (badWinChars.test(path10.substr(root.length))) {
throw Object.assign(new Error("Illegal characters in path."), {
- path: path8,
+ path: path10,
code: "EINVAL"
});
}
}
- return path8;
+ return path10;
};
module2.exports = pathArg;
}
@@ -34682,20 +34762,20 @@ var require_path_arg = __commonJS({
var require_find_made = __commonJS({
".yarn/cache/mkdirp-npm-1.0.4-37f6ef56b9-1233611198.zip/node_modules/mkdirp/lib/find-made.js"(exports, module2) {
var { dirname } = require("path");
- var findMade = (opts, parent, path8 = void 0) => {
- if (path8 === parent)
+ var findMade = (opts, parent, path10 = void 0) => {
+ if (path10 === parent)
return Promise.resolve();
return opts.statAsync(parent).then(
- (st) => st.isDirectory() ? path8 : void 0,
+ (st) => st.isDirectory() ? path10 : void 0,
// will fail later
(er) => er.code === "ENOENT" ? findMade(opts, dirname(parent), parent) : void 0
);
};
- var findMadeSync = (opts, parent, path8 = void 0) => {
- if (path8 === parent)
+ var findMadeSync = (opts, parent, path10 = void 0) => {
+ if (path10 === parent)
return void 0;
try {
- return opts.statSync(parent).isDirectory() ? path8 : void 0;
+ return opts.statSync(parent).isDirectory() ? path10 : void 0;
} catch (er) {
return er.code === "ENOENT" ? findMadeSync(opts, dirname(parent), parent) : void 0;
}
@@ -34708,21 +34788,21 @@ var require_find_made = __commonJS({
var require_mkdirp_manual = __commonJS({
".yarn/cache/mkdirp-npm-1.0.4-37f6ef56b9-1233611198.zip/node_modules/mkdirp/lib/mkdirp-manual.js"(exports, module2) {
var { dirname } = require("path");
- var mkdirpManual = (path8, opts, made) => {
+ var mkdirpManual = (path10, opts, made) => {
opts.recursive = false;
- const parent = dirname(path8);
- if (parent === path8) {
- return opts.mkdirAsync(path8, opts).catch((er) => {
+ const parent = dirname(path10);
+ if (parent === path10) {
+ return opts.mkdirAsync(path10, opts).catch((er) => {
if (er.code !== "EISDIR")
throw er;
});
}
- return opts.mkdirAsync(path8, opts).then(() => made || path8, (er) => {
+ return opts.mkdirAsync(path10, opts).then(() => made || path10, (er) => {
if (er.code === "ENOENT")
- return mkdirpManual(parent, opts).then((made2) => mkdirpManual(path8, opts, made2));
+ return mkdirpManual(parent, opts).then((made2) => mkdirpManual(path10, opts, made2));
if (er.code !== "EEXIST" && er.code !== "EROFS")
throw er;
- return opts.statAsync(path8).then((st) => {
+ return opts.statAsync(path10).then((st) => {
if (st.isDirectory())
return made;
else
@@ -34732,12 +34812,12 @@ var require_mkdirp_manual = __commonJS({
});
});
};
- var mkdirpManualSync = (path8, opts, made) => {
- const parent = dirname(path8);
+ var mkdirpManualSync = (path10, opts, made) => {
+ const parent = dirname(path10);
opts.recursive = false;
- if (parent === path8) {
+ if (parent === path10) {
try {
- return opts.mkdirSync(path8, opts);
+ return opts.mkdirSync(path10, opts);
} catch (er) {
if (er.code !== "EISDIR")
throw er;
@@ -34746,15 +34826,15 @@ var require_mkdirp_manual = __commonJS({
}
}
try {
- opts.mkdirSync(path8, opts);
- return made || path8;
+ opts.mkdirSync(path10, opts);
+ return made || path10;
} catch (er) {
if (er.code === "ENOENT")
- return mkdirpManualSync(path8, opts, mkdirpManualSync(parent, opts, made));
+ return mkdirpManualSync(path10, opts, mkdirpManualSync(parent, opts, made));
if (er.code !== "EEXIST" && er.code !== "EROFS")
throw er;
try {
- if (!opts.statSync(path8).isDirectory())
+ if (!opts.statSync(path10).isDirectory())
throw er;
} catch (_) {
throw er;
@@ -34771,30 +34851,30 @@ var require_mkdirp_native = __commonJS({
var { dirname } = require("path");
var { findMade, findMadeSync } = require_find_made();
var { mkdirpManual, mkdirpManualSync } = require_mkdirp_manual();
- var mkdirpNative = (path8, opts) => {
+ var mkdirpNative = (path10, opts) => {
opts.recursive = true;
- const parent = dirname(path8);
- if (parent === path8)
- return opts.mkdirAsync(path8, opts);
- return findMade(opts, path8).then((made) => opts.mkdirAsync(path8, opts).then(() => made).catch((er) => {
+ const parent = dirname(path10);
+ if (parent === path10)
+ return opts.mkdirAsync(path10, opts);
+ return findMade(opts, path10).then((made) => opts.mkdirAsync(path10, opts).then(() => made).catch((er) => {
if (er.code === "ENOENT")
- return mkdirpManual(path8, opts);
+ return mkdirpManual(path10, opts);
else
throw er;
}));
};
- var mkdirpNativeSync = (path8, opts) => {
+ var mkdirpNativeSync = (path10, opts) => {
opts.recursive = true;
- const parent = dirname(path8);
- if (parent === path8)
- return opts.mkdirSync(path8, opts);
- const made = findMadeSync(opts, path8);
+ const parent = dirname(path10);
+ if (parent === path10)
+ return opts.mkdirSync(path10, opts);
+ const made = findMadeSync(opts, path10);
try {
- opts.mkdirSync(path8, opts);
+ opts.mkdirSync(path10, opts);
return made;
} catch (er) {
if (er.code === "ENOENT")
- return mkdirpManualSync(path8, opts);
+ return mkdirpManualSync(path10, opts);
else
throw er;
}
@@ -34806,12 +34886,12 @@ var require_mkdirp_native = __commonJS({
// .yarn/cache/mkdirp-npm-1.0.4-37f6ef56b9-1233611198.zip/node_modules/mkdirp/lib/use-native.js
var require_use_native = __commonJS({
".yarn/cache/mkdirp-npm-1.0.4-37f6ef56b9-1233611198.zip/node_modules/mkdirp/lib/use-native.js"(exports, module2) {
- var fs6 = require("fs");
+ var fs8 = require("fs");
var version2 = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
var versArr = version2.replace(/^v/, "").split(".");
var hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12;
- var useNative = !hasNative ? () => false : (opts) => opts.mkdir === fs6.mkdir;
- var useNativeSync = !hasNative ? () => false : (opts) => opts.mkdirSync === fs6.mkdirSync;
+ var useNative = !hasNative ? () => false : (opts) => opts.mkdir === fs8.mkdir;
+ var useNativeSync = !hasNative ? () => false : (opts) => opts.mkdirSync === fs8.mkdirSync;
module2.exports = { useNative, useNativeSync };
}
});
@@ -34824,21 +34904,21 @@ var require_mkdirp = __commonJS({
var { mkdirpNative, mkdirpNativeSync } = require_mkdirp_native();
var { mkdirpManual, mkdirpManualSync } = require_mkdirp_manual();
var { useNative, useNativeSync } = require_use_native();
- var mkdirp = (path8, opts) => {
- path8 = pathArg(path8);
+ var mkdirp = (path10, opts) => {
+ path10 = pathArg(path10);
opts = optsArg(opts);
- return useNative(opts) ? mkdirpNative(path8, opts) : mkdirpManual(path8, opts);
+ return useNative(opts) ? mkdirpNative(path10, opts) : mkdirpManual(path10, opts);
};
- var mkdirpSync = (path8, opts) => {
- path8 = pathArg(path8);
+ var mkdirpSync = (path10, opts) => {
+ path10 = pathArg(path10);
opts = optsArg(opts);
- return useNativeSync(opts) ? mkdirpNativeSync(path8, opts) : mkdirpManualSync(path8, opts);
+ return useNativeSync(opts) ? mkdirpNativeSync(path10, opts) : mkdirpManualSync(path10, opts);
};
mkdirp.sync = mkdirpSync;
- mkdirp.native = (path8, opts) => mkdirpNative(pathArg(path8), optsArg(opts));
- mkdirp.manual = (path8, opts) => mkdirpManual(pathArg(path8), optsArg(opts));
- mkdirp.nativeSync = (path8, opts) => mkdirpNativeSync(pathArg(path8), optsArg(opts));
- mkdirp.manualSync = (path8, opts) => mkdirpManualSync(pathArg(path8), optsArg(opts));
+ mkdirp.native = (path10, opts) => mkdirpNative(pathArg(path10), optsArg(opts));
+ mkdirp.manual = (path10, opts) => mkdirpManual(pathArg(path10), optsArg(opts));
+ mkdirp.nativeSync = (path10, opts) => mkdirpNativeSync(pathArg(path10), optsArg(opts));
+ mkdirp.manualSync = (path10, opts) => mkdirpManualSync(pathArg(path10), optsArg(opts));
module2.exports = mkdirp;
}
});
@@ -34847,69 +34927,69 @@ var require_mkdirp = __commonJS({
var require_chownr = __commonJS({
".yarn/cache/chownr-npm-2.0.0-638f1c9c61-7b240ff920.zip/node_modules/chownr/chownr.js"(exports, module2) {
"use strict";
- var fs6 = require("fs");
- var path8 = require("path");
- var LCHOWN = fs6.lchown ? "lchown" : "chown";
- var LCHOWNSYNC = fs6.lchownSync ? "lchownSync" : "chownSync";
- var needEISDIRHandled = fs6.lchown && !process.version.match(/v1[1-9]+\./) && !process.version.match(/v10\.[6-9]/);
- var lchownSync = (path9, uid, gid) => {
+ var fs8 = require("fs");
+ var path10 = require("path");
+ var LCHOWN = fs8.lchown ? "lchown" : "chown";
+ var LCHOWNSYNC = fs8.lchownSync ? "lchownSync" : "chownSync";
+ var needEISDIRHandled = fs8.lchown && !process.version.match(/v1[1-9]+\./) && !process.version.match(/v10\.[6-9]/);
+ var lchownSync = (path11, uid, gid) => {
try {
- return fs6[LCHOWNSYNC](path9, uid, gid);
+ return fs8[LCHOWNSYNC](path11, uid, gid);
} catch (er) {
if (er.code !== "ENOENT")
throw er;
}
};
- var chownSync = (path9, uid, gid) => {
+ var chownSync = (path11, uid, gid) => {
try {
- return fs6.chownSync(path9, uid, gid);
+ return fs8.chownSync(path11, uid, gid);
} catch (er) {
if (er.code !== "ENOENT")
throw er;
}
};
- var handleEISDIR = needEISDIRHandled ? (path9, uid, gid, cb) => (er) => {
+ var handleEISDIR = needEISDIRHandled ? (path11, uid, gid, cb) => (er) => {
if (!er || er.code !== "EISDIR")
cb(er);
else
- fs6.chown(path9, uid, gid, cb);
+ fs8.chown(path11, uid, gid, cb);
} : (_, __, ___, cb) => cb;
- var handleEISDirSync = needEISDIRHandled ? (path9, uid, gid) => {
+ var handleEISDirSync = needEISDIRHandled ? (path11, uid, gid) => {
try {
- return lchownSync(path9, uid, gid);
+ return lchownSync(path11, uid, gid);
} catch (er) {
if (er.code !== "EISDIR")
throw er;
- chownSync(path9, uid, gid);
+ chownSync(path11, uid, gid);
}
- } : (path9, uid, gid) => lchownSync(path9, uid, gid);
+ } : (path11, uid, gid) => lchownSync(path11, uid, gid);
var nodeVersion = process.version;
- var readdir = (path9, options, cb) => fs6.readdir(path9, options, cb);
- var readdirSync = (path9, options) => fs6.readdirSync(path9, options);
+ var readdir = (path11, options, cb) => fs8.readdir(path11, options, cb);
+ var readdirSync = (path11, options) => fs8.readdirSync(path11, options);
if (/^v4\./.test(nodeVersion))
- readdir = (path9, options, cb) => fs6.readdir(path9, cb);
+ readdir = (path11, options, cb) => fs8.readdir(path11, cb);
var chown = (cpath, uid, gid, cb) => {
- fs6[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, (er) => {
+ fs8[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, (er) => {
cb(er && er.code !== "ENOENT" ? er : null);
}));
};
var chownrKid = (p, child, uid, gid, cb) => {
if (typeof child === "string")
- return fs6.lstat(path8.resolve(p, child), (er, stats) => {
+ return fs8.lstat(path10.resolve(p, child), (er, stats) => {
if (er)
return cb(er.code !== "ENOENT" ? er : null);
stats.name = child;
chownrKid(p, stats, uid, gid, cb);
});
if (child.isDirectory()) {
- chownr(path8.resolve(p, child.name), uid, gid, (er) => {
+ chownr(path10.resolve(p, child.name), uid, gid, (er) => {
if (er)
return cb(er);
- const cpath = path8.resolve(p, child.name);
+ const cpath = path10.resolve(p, child.name);
chown(cpath, uid, gid, cb);
});
} else {
- const cpath = path8.resolve(p, child.name);
+ const cpath = path10.resolve(p, child.name);
chown(cpath, uid, gid, cb);
}
};
@@ -34939,7 +35019,7 @@ var require_chownr = __commonJS({
var chownrKidSync = (p, child, uid, gid) => {
if (typeof child === "string") {
try {
- const stats = fs6.lstatSync(path8.resolve(p, child));
+ const stats = fs8.lstatSync(path10.resolve(p, child));
stats.name = child;
child = stats;
} catch (er) {
@@ -34950,8 +35030,8 @@ var require_chownr = __commonJS({
}
}
if (child.isDirectory())
- chownrSync(path8.resolve(p, child.name), uid, gid);
- handleEISDirSync(path8.resolve(p, child.name), uid, gid);
+ chownrSync(path10.resolve(p, child.name), uid, gid);
+ handleEISDirSync(path10.resolve(p, child.name), uid, gid);
};
var chownrSync = (p, uid, gid) => {
let children;
@@ -34979,14 +35059,14 @@ var require_mkdir = __commonJS({
".yarn/cache/tar-npm-6.1.15-44c3e71720-815c25f881.zip/node_modules/tar/lib/mkdir.js"(exports, module2) {
"use strict";
var mkdirp = require_mkdirp();
- var fs6 = require("fs");
- var path8 = require("path");
+ var fs8 = require("fs");
+ var path10 = require("path");
var chownr = require_chownr();
var normPath = require_normalize_windows_path();
var SymlinkError = class extends Error {
- constructor(symlink, path9) {
+ constructor(symlink, path11) {
super("Cannot extract through symbolic link");
- this.path = path9;
+ this.path = path11;
this.symlink = symlink;
}
get name() {
@@ -34994,9 +35074,9 @@ var require_mkdir = __commonJS({
}
};
var CwdError = class extends Error {
- constructor(path9, code) {
- super(code + ": Cannot cd into '" + path9 + "'");
- this.path = path9;
+ constructor(path11, code) {
+ super(code + ": Cannot cd into '" + path11 + "'");
+ this.path = path11;
this.code = code;
}
get name() {
@@ -35006,7 +35086,7 @@ var require_mkdir = __commonJS({
var cGet = (cache, key) => cache.get(normPath(key));
var cSet = (cache, key, val) => cache.set(normPath(key), val);
var checkCwd = (dir, cb) => {
- fs6.stat(dir, (er, st) => {
+ fs8.stat(dir, (er, st) => {
if (er || !st.isDirectory()) {
er = new CwdError(dir, er && er.code || "ENOTDIR");
}
@@ -35033,7 +35113,7 @@ var require_mkdir = __commonJS({
if (created && doChown) {
chownr(created, uid, gid, (er2) => done(er2));
} else if (needChmod) {
- fs6.chmod(dir, mode, cb);
+ fs8.chmod(dir, mode, cb);
} else {
cb();
}
@@ -35048,7 +35128,7 @@ var require_mkdir = __commonJS({
if (preserve) {
return mkdirp(dir, { mode }).then((made) => done(null, made), done);
}
- const sub = normPath(path8.relative(cwd, dir));
+ const sub = normPath(path10.relative(cwd, dir));
const parts = sub.split("/");
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done);
};
@@ -35057,26 +35137,26 @@ var require_mkdir = __commonJS({
return cb(null, created);
}
const p = parts.shift();
- const part = normPath(path8.resolve(base + "/" + p));
+ const part = normPath(path10.resolve(base + "/" + p));
if (cGet(cache, part)) {
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
}
- fs6.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+ fs8.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
};
var onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
if (er) {
- fs6.lstat(part, (statEr, st) => {
+ fs8.lstat(part, (statEr, st) => {
if (statEr) {
statEr.path = statEr.path && normPath(statEr.path);
cb(statEr);
} else if (st.isDirectory()) {
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
} else if (unlink) {
- fs6.unlink(part, (er2) => {
+ fs8.unlink(part, (er2) => {
if (er2) {
return cb(er2);
}
- fs6.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+ fs8.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
});
} else if (st.isSymbolicLink()) {
return cb(new SymlinkError(part, part + "/" + parts.join("/")));
@@ -35093,7 +35173,7 @@ var require_mkdir = __commonJS({
let ok = false;
let code = "ENOTDIR";
try {
- ok = fs6.statSync(dir).isDirectory();
+ ok = fs8.statSync(dir).isDirectory();
} catch (er) {
code = er.code;
} finally {
@@ -35120,7 +35200,7 @@ var require_mkdir = __commonJS({
chownr.sync(created2, uid, gid);
}
if (needChmod) {
- fs6.chmodSync(dir, mode);
+ fs8.chmodSync(dir, mode);
}
};
if (cache && cGet(cache, dir) === true) {
@@ -35133,26 +35213,26 @@ var require_mkdir = __commonJS({
if (preserve) {
return done(mkdirp.sync(dir, mode));
}
- const sub = normPath(path8.relative(cwd, dir));
+ const sub = normPath(path10.relative(cwd, dir));
const parts = sub.split("/");
let created = null;
for (let p = parts.shift(), part = cwd; p && (part += "/" + p); p = parts.shift()) {
- part = normPath(path8.resolve(part));
+ part = normPath(path10.resolve(part));
if (cGet(cache, part)) {
continue;
}
try {
- fs6.mkdirSync(part, mode);
+ fs8.mkdirSync(part, mode);
created = created || part;
cSet(cache, part, true);
} catch (er) {
- const st = fs6.lstatSync(part);
+ const st = fs8.lstatSync(part);
if (st.isDirectory()) {
cSet(cache, part, true);
continue;
} else if (unlink) {
- fs6.unlinkSync(part);
- fs6.mkdirSync(part, mode);
+ fs8.unlinkSync(part);
+ fs8.mkdirSync(part, mode);
created = created || part;
cSet(cache, part, true);
continue;
@@ -35192,12 +35272,12 @@ var require_path_reservations = __commonJS({
module2.exports = () => {
const queues = /* @__PURE__ */ new Map();
const reservations = /* @__PURE__ */ new Map();
- const getDirs = (path8) => {
- const dirs = path8.split("/").slice(0, -1).reduce((set, path9) => {
+ const getDirs = (path10) => {
+ const dirs = path10.split("/").slice(0, -1).reduce((set, path11) => {
if (set.length) {
- path9 = join2(set[set.length - 1], path9);
+ path11 = join2(set[set.length - 1], path11);
}
- set.push(path9 || "/");
+ set.push(path11 || "/");
return set;
}, []);
return dirs;
@@ -35209,8 +35289,8 @@ var require_path_reservations = __commonJS({
throw new Error("function does not have any path reservations");
}
return {
- paths: res.paths.map((path8) => queues.get(path8)),
- dirs: [...res.dirs].map((path8) => queues.get(path8))
+ paths: res.paths.map((path10) => queues.get(path10)),
+ dirs: [...res.dirs].map((path10) => queues.get(path10))
};
};
const check = (fn2) => {
@@ -35231,11 +35311,11 @@ var require_path_reservations = __commonJS({
}
const { paths, dirs } = reservations.get(fn2);
const next = /* @__PURE__ */ new Set();
- paths.forEach((path8) => {
- const q = queues.get(path8);
+ paths.forEach((path10) => {
+ const q = queues.get(path10);
assert2.equal(q[0], fn2);
if (q.length === 1) {
- queues.delete(path8);
+ queues.delete(path10);
} else {
q.shift();
if (typeof q[0] === "function") {
@@ -35266,13 +35346,13 @@ var require_path_reservations = __commonJS({
return stripSlashes(join2(normalize(p))).toLowerCase();
});
const dirs = new Set(
- paths.map((path8) => getDirs(path8)).reduce((a, b) => a.concat(b))
+ paths.map((path10) => getDirs(path10)).reduce((a, b) => a.concat(b))
);
reservations.set(fn2, { dirs, paths });
- paths.forEach((path8) => {
- const q = queues.get(path8);
+ paths.forEach((path10) => {
+ const q = queues.get(path10);
if (!q) {
- queues.set(path8, [fn2]);
+ queues.set(path10, [fn2]);
} else {
q.push(fn2);
}
@@ -35299,8 +35379,8 @@ var require_get_write_flag = __commonJS({
".yarn/cache/tar-npm-6.1.15-44c3e71720-815c25f881.zip/node_modules/tar/lib/get-write-flag.js"(exports, module2) {
var platform = process.env.__FAKE_PLATFORM__ || process.platform;
var isWindows = platform === "win32";
- var fs6 = global.__FAKE_TESTING_FS__ || require("fs");
- var { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs6.constants;
+ var fs8 = global.__FAKE_TESTING_FS__ || require("fs");
+ var { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs8.constants;
var fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
var fMapLimit = 512 * 1024;
var fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
@@ -35314,10 +35394,10 @@ var require_unpack = __commonJS({
"use strict";
var assert2 = require("assert");
var Parser = require_parse2();
- var fs6 = require("fs");
+ var fs8 = require("fs");
var fsm = require_fs_minipass();
- var path8 = require("path");
- var mkdir3 = require_mkdir();
+ var path10 = require("path");
+ var mkdir4 = require_mkdir();
var wc = require_winchars();
var pathReservations = require_path_reservations();
var stripAbsolutePath = require_strip_absolute_path();
@@ -35353,34 +35433,34 @@ var require_unpack = __commonJS({
var getFlag = require_get_write_flag();
var platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
var isWindows = platform === "win32";
- var unlinkFile = (path9, cb) => {
+ var unlinkFile = (path11, cb) => {
if (!isWindows) {
- return fs6.unlink(path9, cb);
+ return fs8.unlink(path11, cb);
}
- const name = path9 + ".DELETE." + crypto.randomBytes(16).toString("hex");
- fs6.rename(path9, name, (er) => {
+ const name = path11 + ".DELETE." + crypto.randomBytes(16).toString("hex");
+ fs8.rename(path11, name, (er) => {
if (er) {
return cb(er);
}
- fs6.unlink(name, cb);
+ fs8.unlink(name, cb);
});
};
- var unlinkFileSync = (path9) => {
+ var unlinkFileSync = (path11) => {
if (!isWindows) {
- return fs6.unlinkSync(path9);
+ return fs8.unlinkSync(path11);
}
- const name = path9 + ".DELETE." + crypto.randomBytes(16).toString("hex");
- fs6.renameSync(path9, name);
- fs6.unlinkSync(name);
+ const name = path11 + ".DELETE." + crypto.randomBytes(16).toString("hex");
+ fs8.renameSync(path11, name);
+ fs8.unlinkSync(name);
};
var uint32 = (a, b, c) => a === a >>> 0 ? a : b === b >>> 0 ? b : c;
- var cacheKeyNormalize = (path9) => stripSlash(normPath(normalize(path9))).toLowerCase();
+ var cacheKeyNormalize = (path11) => stripSlash(normPath(normalize(path11))).toLowerCase();
var pruneCache = (cache, abs) => {
abs = cacheKeyNormalize(abs);
- for (const path9 of cache.keys()) {
- const pnorm = cacheKeyNormalize(path9);
+ for (const path11 of cache.keys()) {
+ const pnorm = cacheKeyNormalize(path11);
if (pnorm === abs || pnorm.indexOf(abs + "/") === 0) {
- cache.delete(path9);
+ cache.delete(path11);
}
}
};
@@ -35438,7 +35518,7 @@ var require_unpack = __commonJS({
this.noMtime = !!opt.noMtime;
this.preservePaths = !!opt.preservePaths;
this.unlink = !!opt.unlink;
- this.cwd = normPath(path8.resolve(opt.cwd || process.cwd()));
+ this.cwd = normPath(path10.resolve(opt.cwd || process.cwd()));
this.strip = +opt.strip || 0;
this.processUmask = opt.noChmod ? 0 : process.umask();
this.umask = typeof opt.umask === "number" ? opt.umask : this.processUmask;
@@ -35497,10 +35577,10 @@ var require_unpack = __commonJS({
});
}
}
- if (path8.isAbsolute(entry.path)) {
- entry.absolute = normPath(path8.resolve(entry.path));
+ if (path10.isAbsolute(entry.path)) {
+ entry.absolute = normPath(path10.resolve(entry.path));
} else {
- entry.absolute = normPath(path8.resolve(this.cwd, entry.path));
+ entry.absolute = normPath(path10.resolve(this.cwd, entry.path));
}
if (!this.preservePaths && entry.absolute.indexOf(this.cwd + "/") !== 0 && entry.absolute !== this.cwd) {
this.warn("TAR_ENTRY_ERROR", "path escaped extraction target", {
@@ -35515,9 +35595,9 @@ var require_unpack = __commonJS({
return false;
}
if (this.win32) {
- const { root: aRoot } = path8.win32.parse(entry.absolute);
+ const { root: aRoot } = path10.win32.parse(entry.absolute);
entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length));
- const { root: pRoot } = path8.win32.parse(entry.path);
+ const { root: pRoot } = path10.win32.parse(entry.path);
entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
}
return true;
@@ -35556,7 +35636,7 @@ var require_unpack = __commonJS({
}
}
[MKDIR](dir, mode, cb) {
- mkdir3(normPath(dir), {
+ mkdir4(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
@@ -35588,7 +35668,7 @@ var require_unpack = __commonJS({
});
stream.on("error", (er) => {
if (stream.fd) {
- fs6.close(stream.fd, () => {
+ fs8.close(stream.fd, () => {
});
}
stream.write = () => true;
@@ -35599,7 +35679,7 @@ var require_unpack = __commonJS({
const done = (er) => {
if (er) {
if (stream.fd) {
- fs6.close(stream.fd, () => {
+ fs8.close(stream.fd, () => {
});
}
this[ONERROR](er, entry);
@@ -35607,7 +35687,7 @@ var require_unpack = __commonJS({
return;
}
if (--actions === 0) {
- fs6.close(stream.fd, (er2) => {
+ fs8.close(stream.fd, (er2) => {
if (er2) {
this[ONERROR](er2, entry);
} else {
@@ -35624,13 +35704,13 @@ var require_unpack = __commonJS({
actions++;
const atime = entry.atime || /* @__PURE__ */ new Date();
const mtime = entry.mtime;
- fs6.futimes(fd, atime, mtime, (er) => er ? fs6.utimes(abs, atime, mtime, (er2) => done(er2 && er)) : done());
+ fs8.futimes(fd, atime, mtime, (er) => er ? fs8.utimes(abs, atime, mtime, (er2) => done(er2 && er)) : done());
}
if (this[DOCHOWN](entry)) {
actions++;
const uid = this[UID](entry);
const gid = this[GID](entry);
- fs6.fchown(fd, uid, gid, (er) => er ? fs6.chown(abs, uid, gid, (er2) => done(er2 && er)) : done());
+ fs8.fchown(fd, uid, gid, (er) => er ? fs8.chown(abs, uid, gid, (er2) => done(er2 && er)) : done());
}
done();
});
@@ -35662,11 +35742,11 @@ var require_unpack = __commonJS({
};
if (entry.mtime && !this.noMtime) {
actions++;
- fs6.utimes(entry.absolute, entry.atime || /* @__PURE__ */ new Date(), entry.mtime, done);
+ fs8.utimes(entry.absolute, entry.atime || /* @__PURE__ */ new Date(), entry.mtime, done);
}
if (this[DOCHOWN](entry)) {
actions++;
- fs6.chown(entry.absolute, this[UID](entry), this[GID](entry), done);
+ fs8.chown(entry.absolute, this[UID](entry), this[GID](entry), done);
}
done();
});
@@ -35684,7 +35764,7 @@ var require_unpack = __commonJS({
this[LINK](entry, entry.linkpath, "symlink", done);
}
[HARDLINK](entry, done) {
- const linkpath = normPath(path8.resolve(this.cwd, entry.linkpath));
+ const linkpath = normPath(path10.resolve(this.cwd, entry.linkpath));
this[LINK](entry, linkpath, "link", done);
}
[PEND]() {
@@ -35739,7 +35819,7 @@ var require_unpack = __commonJS({
};
const start = () => {
if (entry.absolute !== this.cwd) {
- const parent = normPath(path8.dirname(entry.absolute));
+ const parent = normPath(path10.dirname(entry.absolute));
if (parent !== this.cwd) {
return this[MKDIR](parent, this.dmode, (er) => {
if (er) {
@@ -35754,7 +35834,7 @@ var require_unpack = __commonJS({
afterMakeParent();
};
const afterMakeParent = () => {
- fs6.lstat(entry.absolute, (lstatEr, st) => {
+ fs8.lstat(entry.absolute, (lstatEr, st) => {
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
this[SKIP](entry);
done();
@@ -35770,10 +35850,10 @@ var require_unpack = __commonJS({
if (!needChmod) {
return afterChmod();
}
- return fs6.chmod(entry.absolute, entry.mode, afterChmod);
+ return fs8.chmod(entry.absolute, entry.mode, afterChmod);
}
if (entry.absolute !== this.cwd) {
- return fs6.rmdir(entry.absolute, (er) => this[MAKEFS](er, entry, done));
+ return fs8.rmdir(entry.absolute, (er) => this[MAKEFS](er, entry, done));
}
}
if (entry.absolute === this.cwd) {
@@ -35809,7 +35889,7 @@ var require_unpack = __commonJS({
}
}
[LINK](entry, linkpath, link, done) {
- fs6[link](linkpath, entry.absolute, (er) => {
+ fs8[link](linkpath, entry.absolute, (er) => {
if (er) {
this[ONERROR](er, entry);
} else {
@@ -35842,7 +35922,7 @@ var require_unpack = __commonJS({
this[CHECKED_CWD] = true;
}
if (entry.absolute !== this.cwd) {
- const parent = normPath(path8.dirname(entry.absolute));
+ const parent = normPath(path10.dirname(entry.absolute));
if (parent !== this.cwd) {
const mkParent = this[MKDIR](parent, this.dmode);
if (mkParent) {
@@ -35850,7 +35930,7 @@ var require_unpack = __commonJS({
}
}
}
- const [lstatEr, st] = callSync(() => fs6.lstatSync(entry.absolute));
+ const [lstatEr, st] = callSync(() => fs8.lstatSync(entry.absolute));
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
return this[SKIP](entry);
}
@@ -35861,11 +35941,11 @@ var require_unpack = __commonJS({
if (entry.type === "Directory") {
const needChmod = !this.noChmod && entry.mode && (st.mode & 4095) !== entry.mode;
const [er3] = needChmod ? callSync(() => {
- fs6.chmodSync(entry.absolute, entry.mode);
+ fs8.chmodSync(entry.absolute, entry.mode);
}) : [];
return this[MAKEFS](er3, entry);
}
- const [er2] = callSync(() => fs6.rmdirSync(entry.absolute));
+ const [er2] = callSync(() => fs8.rmdirSync(entry.absolute));
this[MAKEFS](er2, entry);
}
const [er] = entry.absolute === this.cwd ? [] : callSync(() => unlinkFileSync(entry.absolute));
@@ -35876,7 +35956,7 @@ var require_unpack = __commonJS({
const oner = (er) => {
let closeError;
try {
- fs6.closeSync(fd);
+ fs8.closeSync(fd);
} catch (e) {
closeError = e;
}
@@ -35887,7 +35967,7 @@ var require_unpack = __commonJS({
};
let fd;
try {
- fd = fs6.openSync(entry.absolute, getFlag(entry.size), mode);
+ fd = fs8.openSync(entry.absolute, getFlag(entry.size), mode);
} catch (er) {
return oner(er);
}
@@ -35898,7 +35978,7 @@ var require_unpack = __commonJS({
}
tx.on("data", (chunk) => {
try {
- fs6.writeSync(fd, chunk, 0, chunk.length);
+ fs8.writeSync(fd, chunk, 0, chunk.length);
} catch (er) {
oner(er);
}
@@ -35909,10 +35989,10 @@ var require_unpack = __commonJS({
const atime = entry.atime || /* @__PURE__ */ new Date();
const mtime = entry.mtime;
try {
- fs6.futimesSync(fd, atime, mtime);
+ fs8.futimesSync(fd, atime, mtime);
} catch (futimeser) {
try {
- fs6.utimesSync(entry.absolute, atime, mtime);
+ fs8.utimesSync(entry.absolute, atime, mtime);
} catch (utimeser) {
er = futimeser;
}
@@ -35922,10 +36002,10 @@ var require_unpack = __commonJS({
const uid = this[UID](entry);
const gid = this[GID](entry);
try {
- fs6.fchownSync(fd, uid, gid);
+ fs8.fchownSync(fd, uid, gid);
} catch (fchowner) {
try {
- fs6.chownSync(entry.absolute, uid, gid);
+ fs8.chownSync(entry.absolute, uid, gid);
} catch (chowner) {
er = er || fchowner;
}
@@ -35944,13 +36024,13 @@ var require_unpack = __commonJS({
}
if (entry.mtime && !this.noMtime) {
try {
- fs6.utimesSync(entry.absolute, entry.atime || /* @__PURE__ */ new Date(), entry.mtime);
+ fs8.utimesSync(entry.absolute, entry.atime || /* @__PURE__ */ new Date(), entry.mtime);
} catch (er2) {
}
}
if (this[DOCHOWN](entry)) {
try {
- fs6.chownSync(entry.absolute, this[UID](entry), this[GID](entry));
+ fs8.chownSync(entry.absolute, this[UID](entry), this[GID](entry));
} catch (er2) {
}
}
@@ -35959,7 +36039,7 @@ var require_unpack = __commonJS({
}
[MKDIR](dir, mode) {
try {
- return mkdir3.sync(normPath(dir), {
+ return mkdir4.sync(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
@@ -35977,7 +36057,7 @@ var require_unpack = __commonJS({
}
[LINK](entry, linkpath, link, done) {
try {
- fs6[link + "Sync"](linkpath, entry.absolute);
+ fs8[link + "Sync"](linkpath, entry.absolute);
done();
entry.resume();
} catch (er) {
@@ -35996,9 +36076,9 @@ var require_extract = __commonJS({
"use strict";
var hlo = require_high_level_opt();
var Unpack = require_unpack();
- var fs6 = require("fs");
+ var fs8 = require("fs");
var fsm = require_fs_minipass();
- var path8 = require("path");
+ var path10 = require("path");
var stripSlash = require_strip_trailing_slashes();
module2.exports = (opt_, files, cb) => {
if (typeof opt_ === "function") {
@@ -36030,8 +36110,8 @@ var require_extract = __commonJS({
const map = new Map(files.map((f) => [stripSlash(f), true]));
const filter = opt.filter;
const mapHas = (file, r) => {
- const root = r || path8.parse(file).root || ".";
- const ret = file === root ? false : map.has(file) ? map.get(file) : mapHas(path8.dirname(file), root);
+ const root = r || path10.parse(file).root || ".";
+ const ret = file === root ? false : map.has(file) ? map.get(file) : mapHas(path10.dirname(file), root);
map.set(file, ret);
return ret;
};
@@ -36040,7 +36120,7 @@ var require_extract = __commonJS({
var extractFileSync = (opt) => {
const u = new Unpack.Sync(opt);
const file = opt.file;
- const stat = fs6.statSync(file);
+ const stat = fs8.statSync(file);
const readSize = opt.maxReadSize || 16 * 1024 * 1024;
const stream = new fsm.ReadStreamSync(file, {
readSize,
@@ -36055,7 +36135,7 @@ var require_extract = __commonJS({
const p = new Promise((resolve, reject) => {
u.on("error", reject);
u.on("close", resolve);
- fs6.stat(file, (er, stat) => {
+ fs8.stat(file, (er, stat) => {
if (er) {
reject(er);
} else {
@@ -36101,17 +36181,17 @@ var require_v8_compile_cache = __commonJS({
"use strict";
var Module2 = require("module");
var crypto = require("crypto");
- var fs6 = require("fs");
- var path8 = require("path");
+ var fs8 = require("fs");
+ var path10 = require("path");
var vm = require("vm");
- var os2 = require("os");
+ var os3 = require("os");
var hasOwnProperty = Object.prototype.hasOwnProperty;
var FileSystemBlobStore = class {
constructor(directory, prefix) {
const name = prefix ? slashEscape(prefix + ".") : "";
- this._blobFilename = path8.join(directory, name + "BLOB");
- this._mapFilename = path8.join(directory, name + "MAP");
- this._lockFilename = path8.join(directory, name + "LOCK");
+ this._blobFilename = path10.join(directory, name + "BLOB");
+ this._mapFilename = path10.join(directory, name + "MAP");
+ this._lockFilename = path10.join(directory, name + "LOCK");
this._directory = directory;
this._load();
}
@@ -36163,22 +36243,22 @@ var require_v8_compile_cache = __commonJS({
const mapToStore = JSON.stringify(dump[1]);
try {
mkdirpSync(this._directory);
- fs6.writeFileSync(this._lockFilename, "LOCK", { flag: "wx" });
+ fs8.writeFileSync(this._lockFilename, "LOCK", { flag: "wx" });
} catch (error) {
return false;
}
try {
- fs6.writeFileSync(this._blobFilename, blobToStore);
- fs6.writeFileSync(this._mapFilename, mapToStore);
+ fs8.writeFileSync(this._blobFilename, blobToStore);
+ fs8.writeFileSync(this._mapFilename, mapToStore);
} finally {
- fs6.unlinkSync(this._lockFilename);
+ fs8.unlinkSync(this._lockFilename);
}
return true;
}
_load() {
try {
- this._storedBlob = fs6.readFileSync(this._blobFilename);
- this._storedMap = JSON.parse(fs6.readFileSync(this._mapFilename));
+ this._storedBlob = fs8.readFileSync(this._blobFilename);
+ this._storedMap = JSON.parse(fs8.readFileSync(this._mapFilename));
} catch (e) {
this._storedBlob = Buffer.alloc(0);
this._storedMap = {};
@@ -36240,7 +36320,7 @@ var require_v8_compile_cache = __commonJS({
require2.main = process.mainModule;
require2.extensions = Module2._extensions;
require2.cache = Module2._cache;
- const dirname = path8.dirname(filename);
+ const dirname = path10.dirname(filename);
const compiledWrapper = self2._moduleCompile(filename, content);
const args = [mod.exports, require2, mod, filename, dirname, process, global, Buffer];
return compiledWrapper.apply(mod.exports, args);
@@ -36295,18 +36375,18 @@ var require_v8_compile_cache = __commonJS({
}
};
function mkdirpSync(p_) {
- _mkdirpSync(path8.resolve(p_), 511);
+ _mkdirpSync(path10.resolve(p_), 511);
}
function _mkdirpSync(p, mode) {
try {
- fs6.mkdirSync(p, mode);
+ fs8.mkdirSync(p, mode);
} catch (err0) {
if (err0.code === "ENOENT") {
- _mkdirpSync(path8.dirname(p));
+ _mkdirpSync(path10.dirname(p));
_mkdirpSync(p);
} else {
try {
- const stat = fs6.statSync(p);
+ const stat = fs8.statSync(p);
if (!stat.isDirectory()) {
throw err0;
}
@@ -36338,7 +36418,7 @@ var require_v8_compile_cache = __commonJS({
}
const dirname = typeof process.getuid === "function" ? "v8-compile-cache-" + process.getuid() : "v8-compile-cache";
const version2 = typeof process.versions.v8 === "string" ? process.versions.v8 : typeof process.versions.chakracore === "string" ? "chakracore-" + process.versions.chakracore : "node-" + process.version;
- const cacheDir = path8.join(os2.tmpdir(), dirname, version2);
+ const cacheDir = path10.join(os3.tmpdir(), dirname, version2);
return cacheDir;
}
function getMainName() {
@@ -36376,8 +36456,8 @@ var require_windows = __commonJS({
".yarn/cache/isexe-npm-2.0.0-b58870bd2e-b37fe0a798.zip/node_modules/isexe/windows.js"(exports, module2) {
module2.exports = isexe;
isexe.sync = sync;
- var fs6 = require("fs");
- function checkPathExt(path8, options) {
+ var fs8 = require("fs");
+ function checkPathExt(path10, options) {
var pathext = options.pathExt !== void 0 ? options.pathExt : process.env.PATHEXT;
if (!pathext) {
return true;
@@ -36388,25 +36468,25 @@ var require_windows = __commonJS({
}
for (var i = 0; i < pathext.length; i++) {
var p = pathext[i].toLowerCase();
- if (p && path8.substr(-p.length).toLowerCase() === p) {
+ if (p && path10.substr(-p.length).toLowerCase() === p) {
return true;
}
}
return false;
}
- function checkStat(stat, path8, options) {
+ function checkStat(stat, path10, options) {
if (!stat.isSymbolicLink() && !stat.isFile()) {
return false;
}
- return checkPathExt(path8, options);
+ return checkPathExt(path10, options);
}
- function isexe(path8, options, cb) {
- fs6.stat(path8, function(er, stat) {
- cb(er, er ? false : checkStat(stat, path8, options));
+ function isexe(path10, options, cb) {
+ fs8.stat(path10, function(er, stat) {
+ cb(er, er ? false : checkStat(stat, path10, options));
});
}
- function sync(path8, options) {
- return checkStat(fs6.statSync(path8), path8, options);
+ function sync(path10, options) {
+ return checkStat(fs8.statSync(path10), path10, options);
}
}
});
@@ -36416,14 +36496,14 @@ var require_mode = __commonJS({
".yarn/cache/isexe-npm-2.0.0-b58870bd2e-b37fe0a798.zip/node_modules/isexe/mode.js"(exports, module2) {
module2.exports = isexe;
isexe.sync = sync;
- var fs6 = require("fs");
- function isexe(path8, options, cb) {
- fs6.stat(path8, function(er, stat) {
+ var fs8 = require("fs");
+ function isexe(path10, options, cb) {
+ fs8.stat(path10, function(er, stat) {
cb(er, er ? false : checkStat(stat, options));
});
}
- function sync(path8, options) {
- return checkStat(fs6.statSync(path8), options);
+ function sync(path10, options) {
+ return checkStat(fs8.statSync(path10), options);
}
function checkStat(stat, options) {
return stat.isFile() && checkMode(stat, options);
@@ -36447,7 +36527,7 @@ var require_mode = __commonJS({
// .yarn/cache/isexe-npm-2.0.0-b58870bd2e-b37fe0a798.zip/node_modules/isexe/index.js
var require_isexe = __commonJS({
".yarn/cache/isexe-npm-2.0.0-b58870bd2e-b37fe0a798.zip/node_modules/isexe/index.js"(exports, module2) {
- var fs6 = require("fs");
+ var fs8 = require("fs");
var core;
if (process.platform === "win32" || global.TESTING_WINDOWS) {
core = require_windows();
@@ -36456,7 +36536,7 @@ var require_isexe = __commonJS({
}
module2.exports = isexe;
isexe.sync = sync;
- function isexe(path8, options, cb) {
+ function isexe(path10, options, cb) {
if (typeof options === "function") {
cb = options;
options = {};
@@ -36466,7 +36546,7 @@ var require_isexe = __commonJS({
throw new TypeError("callback not provided");
}
return new Promise(function(resolve, reject) {
- isexe(path8, options || {}, function(er, is) {
+ isexe(path10, options || {}, function(er, is) {
if (er) {
reject(er);
} else {
@@ -36475,7 +36555,7 @@ var require_isexe = __commonJS({
});
});
}
- core(path8, options || {}, function(er, is) {
+ core(path10, options || {}, function(er, is) {
if (er) {
if (er.code === "EACCES" || options && options.ignoreErrors) {
er = null;
@@ -36485,9 +36565,9 @@ var require_isexe = __commonJS({
cb(er, is);
});
}
- function sync(path8, options) {
+ function sync(path10, options) {
try {
- return core.sync(path8, options || {});
+ return core.sync(path10, options || {});
} catch (er) {
if (options && options.ignoreErrors || er.code === "EACCES") {
return false;
@@ -36621,10 +36701,10 @@ var require_is_windows = __commonJS({
var require_cmd_extension = __commonJS({
".yarn/cache/cmd-extension-npm-1.0.2-11aa204c4b-c0f4db69b5.zip/node_modules/cmd-extension/index.js"(exports, module2) {
"use strict";
- var path8 = require("path");
+ var path10 = require("path");
var cmdExtension;
if (process.env.PATHEXT) {
- cmdExtension = process.env.PATHEXT.split(path8.delimiter).find((ext) => ext.toUpperCase() === ".CMD");
+ cmdExtension = process.env.PATHEXT.split(path10.delimiter).find((ext) => ext.toUpperCase() === ".CMD");
}
module2.exports = cmdExtension || ".cmd";
}
@@ -36636,7 +36716,7 @@ var require_cmd_shim = __commonJS({
"use strict";
cmdShim2.ifExists = cmdShimIfExists;
var util_1 = require("util");
- var path8 = require("path");
+ var path10 = require("path");
var isWindows = require_is_windows();
var CMD_EXTENSION = require_cmd_extension();
var shebangExpr = /^#!\s*(?:\/usr\/bin\/env(?:\s+-S\s*)?)?\s*([^ \t]+)(.*)$/;
@@ -36657,15 +36737,15 @@ var require_cmd_shim = __commonJS({
]);
function ingestOptions(opts) {
const opts_ = { ...DEFAULT_OPTIONS, ...opts };
- const fs6 = opts_.fs;
+ const fs8 = opts_.fs;
opts_.fs_ = {
- chmod: fs6.chmod ? (0, util_1.promisify)(fs6.chmod) : async () => {
+ chmod: fs8.chmod ? (0, util_1.promisify)(fs8.chmod) : async () => {
},
- mkdir: (0, util_1.promisify)(fs6.mkdir),
- readFile: (0, util_1.promisify)(fs6.readFile),
- stat: (0, util_1.promisify)(fs6.stat),
- unlink: (0, util_1.promisify)(fs6.unlink),
- writeFile: (0, util_1.promisify)(fs6.writeFile)
+ mkdir: (0, util_1.promisify)(fs8.mkdir),
+ readFile: (0, util_1.promisify)(fs8.readFile),
+ stat: (0, util_1.promisify)(fs8.stat),
+ unlink: (0, util_1.promisify)(fs8.unlink),
+ writeFile: (0, util_1.promisify)(fs8.writeFile)
};
return opts_;
}
@@ -36677,8 +36757,8 @@ var require_cmd_shim = __commonJS({
return cmdShim2(src, to, opts).catch(() => {
});
}
- function rm2(path9, opts) {
- return opts.fs_.unlink(path9).catch(() => {
+ function rm2(path11, opts) {
+ return opts.fs_.unlink(path11).catch(() => {
});
}
async function cmdShim_(src, to, opts) {
@@ -36687,7 +36767,7 @@ var require_cmd_shim = __commonJS({
return writeAllShims(src, to, srcRuntimeInfo, opts);
}
function writeShimsPreCommon(target, opts) {
- return opts.fs_.mkdir(path8.dirname(target), { recursive: true });
+ return opts.fs_.mkdir(path10.dirname(target), { recursive: true });
}
function writeAllShims(src, to, srcRuntimeInfo, opts) {
const opts_ = ingestOptions(opts);
@@ -36712,7 +36792,7 @@ var require_cmd_shim = __commonJS({
const firstLine = data.trim().split(/\r*\n/)[0];
const shebang = firstLine.match(shebangExpr);
if (!shebang) {
- const targetExtension = path8.extname(target).toLowerCase();
+ const targetExtension = path10.extname(target).toLowerCase();
return {
// undefined if extension is unknown but it's converted to null.
program: extensionToProgramMap.get(targetExtension) || null,
@@ -36738,7 +36818,7 @@ var require_cmd_shim = __commonJS({
function getExeExtension() {
let cmdExtension;
if (process.env.PATHEXT) {
- cmdExtension = process.env.PATHEXT.split(path8.delimiter).find((ext) => ext.toLowerCase() === ".exe");
+ cmdExtension = process.env.PATHEXT.split(path10.delimiter).find((ext) => ext.toLowerCase() === ".exe");
}
return cmdExtension || ".exe";
}
@@ -36754,9 +36834,9 @@ var require_cmd_shim = __commonJS({
return writeShimPost(to, opts);
}
function generateCmdShim(src, to, opts) {
- const shTarget = path8.relative(path8.dirname(to), src);
+ const shTarget = path10.relative(path10.dirname(to), src);
let target = shTarget.split("/").join("\\");
- const quotedPathToTarget = path8.isAbsolute(target) ? `"${target}"` : `"%~dp0\\${target}"`;
+ const quotedPathToTarget = path10.isAbsolute(target) ? `"${target}"` : `"%~dp0\\${target}"`;
let longProg;
let prog = opts.prog;
let args = opts.args || "";
@@ -36802,11 +36882,11 @@ var require_cmd_shim = __commonJS({
return cmd;
}
function generateShShim(src, to, opts) {
- let shTarget = path8.relative(path8.dirname(to), src);
+ let shTarget = path10.relative(path10.dirname(to), src);
let shProg = opts.prog && opts.prog.split("\\").join("/");
let shLongProg;
shTarget = shTarget.split("\\").join("/");
- const quotedPathToTarget = path8.isAbsolute(shTarget) ? `"${shTarget}"` : `"$basedir/${shTarget}"`;
+ const quotedPathToTarget = path10.isAbsolute(shTarget) ? `"${shTarget}"` : `"$basedir/${shTarget}"`;
let args = opts.args || "";
const shNodePath = normalizePathEnvVar(opts.nodePath).posix;
if (!shProg) {
@@ -36856,12 +36936,12 @@ exit $?
return sh;
}
function generatePwshShim(src, to, opts) {
- let shTarget = path8.relative(path8.dirname(to), src);
+ let shTarget = path10.relative(path10.dirname(to), src);
const shProg = opts.prog && opts.prog.split("\\").join("/");
let pwshProg = shProg && `"${shProg}$exe"`;
let pwshLongProg;
shTarget = shTarget.split("\\").join("/");
- const quotedPathToTarget = path8.isAbsolute(shTarget) ? `"${shTarget}"` : `"$basedir/${shTarget}"`;
+ const quotedPathToTarget = path10.isAbsolute(shTarget) ? `"${shTarget}"` : `"$basedir/${shTarget}"`;
let args = opts.args || "";
let normalizedNodePathEnvVar = normalizePathEnvVar(opts.nodePath);
const nodePath = normalizedNodePathEnvVar.win32;
@@ -36959,7 +37039,7 @@ ${nodePath ? "$env:NODE_PATH=$env_node_path\n" : ""}${prependPath ? "$env:PATH=$
posix: ""
};
}
- let split = typeof nodePath === "string" ? nodePath.split(path8.delimiter) : Array.from(nodePath);
+ let split = typeof nodePath === "string" ? nodePath.split(path10.delimiter) : Array.from(nodePath);
let result = {};
for (let i = 0; i < split.length; i++) {
const win32 = split[i].split("/").join("\\");
@@ -37140,10 +37220,10 @@ function cleanValidationError(message, { mergeName = false } = {}) {
const match = message.match(/^([^:]+): (.*)$/m);
if (!match)
return `validation failed`;
- let [, path8, line] = match;
+ let [, path10, line] = match;
if (mergeName)
line = line[0].toLowerCase() + line.slice(1);
- line = path8 !== `.` || !mergeName ? `${path8.replace(/^\.(\[|$)/, `$1`)}: ${line}` : `: ${line}`;
+ line = path10 !== `.` || !mergeName ? `${path10.replace(/^\.(\[|$)/, `$1`)}: ${line}` : `: ${line}`;
return line;
}
function formatError(message, errors) {
@@ -37729,8 +37809,8 @@ var CommandBuilder = class {
this.cliIndex = cliIndex;
this.cliOpts = cliOpts;
}
- addPath(path8) {
- this.paths.push(path8);
+ addPath(path10) {
+ this.paths.push(path10);
}
setArity({ leading = this.arity.leading, trailing = this.arity.trailing, extra = this.arity.extra, proxy = this.arity.proxy }) {
Object.assign(this.arity, { leading, trailing, extra, proxy });
@@ -37814,17 +37894,17 @@ var CommandBuilder = class {
registerStatic(machine, NODE_INITIAL, START_OF_INPUT, firstNode, [`setCandidateState`, { candidateUsage, requiredOptions }]);
const positionalArgument = this.arity.proxy ? `always` : `isNotOptionLike`;
const paths = this.paths.length > 0 ? this.paths : [[]];
- for (const path8 of paths) {
+ for (const path10 of paths) {
let lastPathNode = firstNode;
- if (path8.length > 0) {
+ if (path10.length > 0) {
const optionPathNode = injectNode(machine, makeNode());
registerShortcut(machine, lastPathNode, optionPathNode);
this.registerOptions(machine, optionPathNode);
lastPathNode = optionPathNode;
}
- for (let t = 0; t < path8.length; ++t) {
+ for (let t = 0; t < path10.length; ++t) {
const nextPathNode = injectNode(machine, makeNode());
- registerStatic(machine, lastPathNode, path8[t], nextPathNode, `pushPath`);
+ registerStatic(machine, lastPathNode, path10[t], nextPathNode, `pushPath`);
lastPathNode = nextPathNode;
}
if (this.arity.leading.length > 0 || !this.arity.proxy) {
@@ -38102,8 +38182,8 @@ var Cli = class {
const index = builder.cliIndex;
const paths = (_a = commandClass.paths) !== null && _a !== void 0 ? _a : command.paths;
if (typeof paths !== `undefined`)
- for (const path8 of paths)
- builder.addPath(path8);
+ for (const path10 of paths)
+ builder.addPath(path10);
this.registrations.set(commandClass, { specs, builder, index });
for (const [key, { definition }] of specs.entries())
definition(builder, key);
@@ -38203,13 +38283,13 @@ var Cli = class {
for (const [commandClass, { index }] of this.registrations) {
if (typeof commandClass.usage === `undefined`)
continue;
- const { usage: path8 } = this.getUsageByIndex(index, { detailed: false });
+ const { usage: path10 } = this.getUsageByIndex(index, { detailed: false });
const { usage, options } = this.getUsageByIndex(index, { detailed: true, inlineOptions: false });
const category = typeof commandClass.usage.category !== `undefined` ? formatMarkdownish(commandClass.usage.category, { format: this.format(colored), paragraphs: false }) : void 0;
const description = typeof commandClass.usage.description !== `undefined` ? formatMarkdownish(commandClass.usage.description, { format: this.format(colored), paragraphs: false }) : void 0;
const details = typeof commandClass.usage.details !== `undefined` ? formatMarkdownish(commandClass.usage.details, { format: this.format(colored), paragraphs: true }) : void 0;
const examples = typeof commandClass.usage.examples !== `undefined` ? commandClass.usage.examples.map(([label, cli]) => [formatMarkdownish(label, { format: this.format(colored), paragraphs: false }), cli.replace(/\$0/g, this.binaryName)]) : void 0;
- data.push({ path: path8, usage, category, description, details, examples, options });
+ data.push({ path: path10, usage, category, description, details, examples, options });
}
return data;
}
@@ -38220,7 +38300,7 @@ var Cli = class {
const paths = commandClass2.paths;
const isDocumented = typeof commandClass2.usage !== `undefined`;
const isExclusivelyDefault = !paths || paths.length === 0 || paths.length === 1 && paths[0].length === 0;
- const isDefault = isExclusivelyDefault || ((_a = paths === null || paths === void 0 ? void 0 : paths.some((path8) => path8.length === 0)) !== null && _a !== void 0 ? _a : false);
+ const isDefault = isExclusivelyDefault || ((_a = paths === null || paths === void 0 ? void 0 : paths.some((path10) => path10.length === 0)) !== null && _a !== void 0 ? _a : false);
if (isDefault) {
if (command) {
command = null;
@@ -38675,7 +38755,7 @@ function String2(descriptor, ...args) {
}
// package.json
-var version = "0.19.0";
+var version = "0.20.0";
// sources/Engine.ts
var import_fs3 = __toESM(require("fs"));
@@ -38687,7 +38767,7 @@ var import_semver3 = __toESM(require_semver2());
var config_default = {
definitions: {
npm: {
- default: "9.7.2+sha1.95fdbba1b3b8e918f253cac1a45b38655f324ed5",
+ default: "9.8.1+sha1.b8f070cc770128b38017160491504184863329f0",
fetchLatestFrom: {
type: "npm",
package: "npm"
@@ -38713,12 +38793,15 @@ var config_default = {
registry: {
type: "npm",
package: "npm"
+ },
+ commands: {
+ use: ["npm", "install"]
}
}
}
},
pnpm: {
- default: "8.6.3+sha1.30b2ad9776a393ccd1766239fd21e0c14a3c3acf",
+ default: "8.6.12+sha1.a2f983fbf8f2531dc85db2a5d7f398063d51a6f3",
fetchLatestFrom: {
type: "npm",
package: "pnpm"
@@ -38748,6 +38831,9 @@ var config_default = {
registry: {
type: "npm",
package: "pnpm"
+ },
+ commands: {
+ use: ["pnpm", "install"]
}
},
">=6.0.0": {
@@ -38759,6 +38845,9 @@ var config_default = {
registry: {
type: "npm",
package: "pnpm"
+ },
+ commands: {
+ use: ["pnpm", "install"]
}
}
}
@@ -38770,7 +38859,7 @@ var config_default = {
package: "yarn"
},
transparent: {
- default: "3.6.0+sha224.19e47520fa56c6146388fdeb438d9dcf6630c3f277a2e1180995c3bb",
+ default: "3.6.3+sha224.524038cfca4ec8b2d45164c9c13f05e57a7d762f30542a8d647f69e3",
commands: [
[
"yarn",
@@ -38788,6 +38877,9 @@ var config_default = {
registry: {
type: "npm",
package: "yarn"
+ },
+ commands: {
+ use: ["yarn", "install"]
}
},
">=2.0.0": {
@@ -38804,6 +38896,9 @@ var config_default = {
tags: "latest",
versions: "tags"
}
+ },
+ commands: {
+ use: ["yarn", "install"]
}
}
}
@@ -38854,10 +38949,10 @@ function getTemporaryFolder(target = (0, import_os.tmpdir)()) {
while (true) {
const rnd = Math.random() * 4294967296;
const hex = rnd.toString(16).padStart(8, `0`);
- const path8 = (0, import_path.join)(target, `corepack-${import_process.default.pid}-${hex}`);
+ const path10 = (0, import_path.join)(target, `corepack-${import_process.default.pid}-${hex}`);
try {
- (0, import_fs.mkdirSync)(path8);
- return path8;
+ (0, import_fs.mkdirSync)(path10);
+ return path10;
} catch (error) {
if (error.code === `EEXIST`) {
continue;
@@ -38870,8 +38965,8 @@ function getTemporaryFolder(target = (0, import_os.tmpdir)()) {
// sources/fsUtils.ts
var import_promises = require("fs/promises");
-async function rimraf(path8) {
- return (0, import_promises.rm)(path8, { recursive: true, force: true });
+async function rimraf(path10) {
+ return (0, import_promises.rm)(path10, { recursive: true, force: true });
}
// sources/httpUtils.ts
@@ -39027,9 +39122,15 @@ async function installVersion(installTarget, locator, { spec }) {
const { default: tar } = await Promise.resolve().then(() => __toESM(require_tar()));
const { version: version2, build } = import_semver.default.parse(locator.reference);
const installFolder = import_path2.default.join(installTarget, locator.name, version2);
- if (import_fs2.default.existsSync(installFolder)) {
+ const corepackFile = import_path2.default.join(installFolder, `.corepack`);
+ if (import_fs2.default.existsSync(corepackFile)) {
+ const corepackContent = await import_fs2.default.promises.readFile(corepackFile, `utf8`);
+ const corepackData = JSON.parse(corepackContent);
log(`Reusing ${locator.name}@${locator.reference}`);
- return installFolder;
+ return {
+ hash: corepackData.hash,
+ location: installFolder
+ };
}
const defaultNpmRegistryURL = spec.url.replace(`{}`, version2);
const url = process.env.COREPACK_NPM_REGISTRY ? defaultNpmRegistryURL.replace(
@@ -39050,11 +39151,21 @@ async function installVersion(installTarget, locator, { spec }) {
sendTo = import_fs2.default.createWriteStream(outputFile);
}
stream.pipe(sendTo);
- const hash = build[0] ? stream.pipe((0, import_crypto.createHash)(build[0])) : null;
+ const algo = build[0] ?? `sha256`;
+ const hash = stream.pipe((0, import_crypto.createHash)(algo));
await (0, import_events.once)(sendTo, `finish`);
- const actualHash = hash?.digest(`hex`);
- if (actualHash !== build[1])
+ const actualHash = hash.digest(`hex`);
+ if (build[1] && actualHash !== build[1])
throw new Error(`Mismatch hashes. Expected ${build[1]}, got ${actualHash}`);
+ const serializedHash = `${algo}.${actualHash}`;
+ await import_fs2.default.promises.writeFile(import_path2.default.join(tmpFolder, `.corepack`), JSON.stringify({
+ locator,
+ hash: serializedHash
+ }));
+ await import_fs2.default.promises.rm(installFolder, {
+ recursive: true,
+ force: true
+ });
await import_fs2.default.promises.mkdir(import_path2.default.dirname(installFolder), { recursive: true });
try {
await import_fs2.default.promises.rename(tmpFolder, installFolder);
@@ -39068,7 +39179,10 @@ async function installVersion(installTarget, locator, { spec }) {
}
}
log(`Install finished`);
- return installFolder;
+ return {
+ location: installFolder,
+ hash: serializedHash
+ };
}
async function runVersion(locator, installSpec, binName, args) {
let binPath = null;
@@ -39167,6 +39281,16 @@ var Engine = class {
}
return null;
}
+ getPackageManagerSpecFor(locator) {
+ const definition = this.config.definitions[locator.name];
+ if (typeof definition === `undefined`)
+ throw new UsageError(`This package manager (${locator.name}) isn't supported by this corepack build`);
+ const ranges = Object.keys(definition.ranges).reverse();
+ const range = ranges.find((range2) => satisfiesWithPrereleases(locator.reference, range2));
+ if (typeof range === `undefined`)
+ throw new Error(`Assertion failed: Specified resolution (${locator.reference}) isn't supported by any of ${ranges.join(`, `)}`);
+ return definition.ranges[range];
+ }
getBinariesFor(name) {
const binNames = /* @__PURE__ */ new Set();
for (const rangeDefinition of Object.values(this.config.definitions[name].ranges)) {
@@ -39222,21 +39346,18 @@ var Engine = class {
`);
}
async ensurePackageManager(locator) {
- const definition = this.config.definitions[locator.name];
- if (typeof definition === `undefined`)
- throw new UsageError(`This package manager (${locator.name}) isn't supported by this corepack build`);
- const ranges = Object.keys(definition.ranges).reverse();
- const range = ranges.find((range2) => satisfiesWithPrereleases(locator.reference, range2));
- if (typeof range === `undefined`)
- throw new Error(`Assertion failed: Specified resolution (${locator.reference}) isn't supported by any of ${ranges.join(`, `)}`);
- const installedLocation = await installVersion(getInstallFolder(), locator, {
- spec: definition.ranges[range]
+ const spec = this.getPackageManagerSpecFor(locator);
+ const packageManagerInfo = await installVersion(getInstallFolder(), locator, {
+ spec
});
return {
- location: installedLocation,
- spec: definition.ranges[range]
+ ...packageManagerInfo,
+ locator,
+ spec
};
}
+ async fetchAvailableVersions() {
+ }
async resolveDescriptor(descriptor, { allowTags = false, useCache = true } = {}) {
const definition = this.config.definitions[descriptor.name];
if (typeof definition === `undefined`)
@@ -39244,7 +39365,7 @@ var Engine = class {
let finalDescriptor = descriptor;
if (!import_semver3.default.valid(descriptor.range) && !import_semver3.default.validRange(descriptor.range)) {
if (!allowTags)
- throw new UsageError(`Packages managers can't be referended via tags in this context`);
+ throw new UsageError(`Packages managers can't be referenced via tags in this context`);
const ranges = Object.keys(definition.ranges);
const tagRange = ranges[ranges.length - 1];
const tags = await fetchAvailableTags2(definition.ranges[tagRange].registry);
@@ -39260,21 +39381,14 @@ var Engine = class {
return { name: finalDescriptor.name, reference: cachedVersion };
if (import_semver3.default.valid(finalDescriptor.range))
return { name: finalDescriptor.name, reference: finalDescriptor.range };
- const candidateRangeDefinitions = Object.keys(definition.ranges).filter((range) => {
- return satisfiesWithPrereleases(finalDescriptor.range, range);
- });
- const tagResolutions = await Promise.all(candidateRangeDefinitions.map(async (range) => {
- return [range, await fetchAvailableVersions2(definition.ranges[range].registry)];
+ const versions = await Promise.all(Object.keys(definition.ranges).map(async (range) => {
+ const versions2 = await fetchAvailableVersions2(definition.ranges[range].registry);
+ return versions2.filter((version2) => satisfiesWithPrereleases(version2, finalDescriptor.range));
}));
- const resolutionMap = /* @__PURE__ */ new Map();
- for (const [range, resolutions] of tagResolutions)
- for (const entry of resolutions)
- resolutionMap.set(entry, range);
- const candidates = [...resolutionMap.keys()];
- const maxSatisfying = import_semver3.default.maxSatisfying(candidates, finalDescriptor.range);
- if (maxSatisfying === null)
+ const highestVersion = [...new Set(versions.flat())].sort(import_semver3.default.rcompare);
+ if (highestVersion.length === 0)
return null;
- return { name: finalDescriptor.name, reference: maxSatisfying };
+ return { name: finalDescriptor.name, reference: highestVersion[0] };
}
getLastKnownGoodFile() {
return import_path3.default.join(getInstallFolder(), `lastKnownGood.json`);
@@ -39432,78 +39546,13 @@ EnableCommand.usage = Command.Usage({
]]
});
-// sources/commands/Hydrate.ts
-var import_promises2 = require("fs/promises");
-var import_path6 = __toESM(require("path"));
-var HydrateCommand = class extends Command {
- constructor() {
- super(...arguments);
- this.activate = options_exports.Boolean(`--activate`, false, {
- description: `If true, this release will become the default one for this package manager`
- });
- this.fileName = options_exports.String();
- }
- async execute() {
- const installFolder = getInstallFolder();
- const fileName = import_path6.default.resolve(this.context.cwd, this.fileName);
- const archiveEntries = /* @__PURE__ */ new Map();
- let hasShortEntries = false;
- const { default: tar } = await Promise.resolve().then(() => __toESM(require_tar()));
- await tar.t({ file: fileName, onentry: (entry) => {
- const segments = entry.path.split(/\//g);
- if (segments.length < 3) {
- hasShortEntries = true;
- } else {
- let references = archiveEntries.get(segments[0]);
- if (typeof references === `undefined`)
- archiveEntries.set(segments[0], references = /* @__PURE__ */ new Set());
- references.add(segments[1]);
- }
- } });
- if (hasShortEntries || archiveEntries.size < 1)
- throw new UsageError(`Invalid archive format; did it get generated by 'corepack prepare'?`);
- for (const [name, references] of archiveEntries) {
- for (const reference of references) {
- if (!isSupportedPackageManager(name))
- throw new UsageError(`Unsupported package manager '${name}'`);
- if (this.activate)
- this.context.stdout.write(`Hydrating ${name}@${reference} for immediate activation...
-`);
- else
- this.context.stdout.write(`Hydrating ${name}@${reference}...
-`);
- await (0, import_promises2.mkdir)(installFolder, { recursive: true });
- await tar.x({ file: fileName, cwd: installFolder }, [`${name}/${reference}`]);
- if (this.activate) {
- await this.context.engine.activatePackageManager({ name, reference });
- }
- }
- }
- this.context.stdout.write(`All done!
-`);
- }
-};
-HydrateCommand.paths = [
- [`hydrate`]
-];
-HydrateCommand.usage = Command.Usage({
- description: `Import a package manager into the cache`,
- details: `
- This command unpacks a package manager archive into the cache. The archive must have been generated by the \`corepack prepare\` command - no other will work.
- `,
- examples: [[
- `Import a package manager in the cache`,
- `$0 hydrate corepack.tgz`
- ]]
-});
-
-// sources/commands/Prepare.ts
-var import_promises3 = require("fs/promises");
-var import_path8 = __toESM(require("path"));
+// sources/commands/InstallGlobal.ts
+var import_fs8 = __toESM(require("fs"));
+var import_path7 = __toESM(require("path"));
// sources/specUtils.ts
var import_fs6 = __toESM(require("fs"));
-var import_path7 = __toESM(require("path"));
+var import_path6 = __toESM(require("path"));
var import_semver4 = __toESM(require_semver2());
var nodeModulesRegExp = /[\\/]node_modules[\\/](@[^\\/]*[\\/])?([^@\\/][^\\/]*)$/;
function parseSpec(raw, source, { enforceExactVersion = true } = {}) {
@@ -39556,10 +39605,10 @@ async function loadSpec(initialCwd) {
let selection = null;
while (nextCwd !== currCwd && (!selection || !selection.data.packageManager)) {
currCwd = nextCwd;
- nextCwd = import_path7.default.dirname(currCwd);
+ nextCwd = import_path6.default.dirname(currCwd);
if (nodeModulesRegExp.test(currCwd))
continue;
- const manifestPath = import_path7.default.join(currCwd, `package.json`);
+ const manifestPath = import_path6.default.join(currCwd, `package.json`);
if (!import_fs6.default.existsSync(manifestPath))
continue;
const content = await import_fs6.default.promises.readFile(manifestPath, `utf8`);
@@ -39569,21 +39618,449 @@ async function loadSpec(initialCwd) {
} catch {
}
if (typeof data !== `object` || data === null)
- throw new UsageError(`Invalid package.json in ${import_path7.default.relative(initialCwd, manifestPath)}`);
+ throw new UsageError(`Invalid package.json in ${import_path6.default.relative(initialCwd, manifestPath)}`);
selection = { data, manifestPath };
}
if (selection === null)
- return { type: `NoProject`, target: import_path7.default.join(initialCwd, `package.json`) };
+ return { type: `NoProject`, target: import_path6.default.join(initialCwd, `package.json`) };
const rawPmSpec = selection.data.packageManager;
if (typeof rawPmSpec === `undefined`)
return { type: `NoSpec`, target: selection.manifestPath };
return {
type: `Found`,
- spec: parseSpec(rawPmSpec, import_path7.default.relative(initialCwd, selection.manifestPath))
+ target: selection.manifestPath,
+ spec: parseSpec(rawPmSpec, import_path6.default.relative(initialCwd, selection.manifestPath))
+ };
+}
+
+// sources/commands/Base.ts
+var import_fs7 = __toESM(require("fs"));
+
+// sources/nodeUtils.ts
+var import_os2 = __toESM(require("os"));
+function getEndOfLine(content) {
+ const matches = content.match(/\r?\n/g);
+ if (matches === null)
+ return import_os2.default.EOL;
+ const crlf = matches.filter((nl) => nl === `\r
+`).length;
+ const lf = matches.length - crlf;
+ return crlf > lf ? `\r
+` : `
+`;
+}
+function normalizeLineEndings(originalContent, newContent) {
+ return newContent.replace(/\r?\n/g, getEndOfLine(originalContent));
+}
+function getIndent(content) {
+ const indentMatch = content.match(/^[ \t]+/m);
+ if (indentMatch) {
+ return indentMatch[0];
+ } else {
+ return ` `;
+ }
+}
+function stripBOM(content) {
+ if (content.charCodeAt(0) === 65279) {
+ return content.slice(1);
+ } else {
+ return content;
+ }
+}
+function readPackageJson(content) {
+ return {
+ data: JSON.parse(stripBOM(content) || `{}`),
+ indent: getIndent(content)
};
}
-// sources/commands/Prepare.ts
+// sources/commands/Base.ts
+var BaseCommand = class extends Command {
+ async resolvePatternsToDescriptors({ all, patterns }) {
+ if (all && patterns.length > 0)
+ throw new UsageError(`The --all option cannot be used along with an explicit package manager specification`);
+ const resolvedSpecs = all ? await this.context.engine.getDefaultDescriptors() : patterns.map((pattern) => parseSpec(pattern, `CLI arguments`, { enforceExactVersion: false }));
+ if (resolvedSpecs.length === 0) {
+ const lookup = await loadSpec(this.context.cwd);
+ switch (lookup.type) {
+ case `NoProject`:
+ throw new UsageError(`Couldn't find a project in the local directory - please explicit the package manager to pack, or run this command from a valid project`);
+ case `NoSpec`:
+ throw new UsageError(`The local project doesn't feature a 'packageManager' field - please explicit the package manager to pack, or update the manifest to reference it`);
+ default: {
+ return [lookup.spec];
+ }
+ }
+ }
+ return resolvedSpecs;
+ }
+ async setLocalPackageManager(info) {
+ const lookup = await loadSpec(this.context.cwd);
+ const content = lookup.target !== `NoProject` ? await import_fs7.default.promises.readFile(lookup.target, `utf8`) : ``;
+ const { data, indent } = readPackageJson(content);
+ const previousPackageManager = data.packageManager ?? `unknown`;
+ data.packageManager = `${info.locator.name}@${info.locator.reference}+${info.hash}`;
+ const newContent = normalizeLineEndings(content, `${JSON.stringify(data, null, indent)}
+`);
+ await import_fs7.default.promises.writeFile(lookup.target, newContent, `utf8`);
+ const command = this.context.engine.getPackageManagerSpecFor(info.locator).commands?.use ?? null;
+ if (command === null)
+ return 0;
+ process.env.COREPACK_MIGRATE_FROM = previousPackageManager;
+ this.context.stdout.write(`
+`);
+ const [binaryName, ...args] = command;
+ return await runVersion(info.locator, info, binaryName, args);
+ }
+};
+
+// sources/commands/InstallGlobal.ts
+var InstallGlobalCommand = class extends BaseCommand {
+ constructor() {
+ super(...arguments);
+ this.global = options_exports.Boolean(`-g,--global`, {
+ required: true
+ });
+ this.all = options_exports.Boolean(`--all`, false, {
+ description: `If true, all available default package managers will be installed`
+ });
+ this.cacheOnly = options_exports.Boolean(`--cache-only`, false, {
+ description: `If true, the package managers will only be cached, not set as new defaults`
+ });
+ this.args = options_exports.Rest();
+ }
+ async execute() {
+ if (this.args.length === 0 && !this.all)
+ throw new UsageError(`No package managers specified; use --all to install all available package managers, or specify one or more package managers to proceed`);
+ if (!this.all) {
+ for (const arg of this.args) {
+ if (arg.endsWith(`.tgz`)) {
+ await this.installFromTarball(import_path7.default.resolve(this.context.cwd, arg));
+ } else {
+ await this.installFromDescriptor(parseSpec(arg, `CLI arguments`, { enforceExactVersion: false }));
+ }
+ }
+ } else {
+ for (const descriptor of await this.context.engine.getDefaultDescriptors()) {
+ await this.installFromDescriptor(descriptor);
+ }
+ }
+ }
+ log(locator) {
+ if (this.cacheOnly) {
+ this.context.stdout.write(`Adding ${locator.name}@${locator.reference} to the cache...
+`);
+ } else {
+ this.context.stdout.write(`Installing ${locator.name}@${locator.reference}...
+`);
+ }
+ }
+ async installFromDescriptor(descriptor) {
+ const resolved = await this.context.engine.resolveDescriptor(descriptor, { allowTags: true, useCache: false });
+ if (resolved === null)
+ throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
+ this.log(resolved);
+ await this.context.engine.ensurePackageManager(resolved);
+ if (!this.cacheOnly) {
+ await this.context.engine.activatePackageManager(resolved);
+ }
+ }
+ async installFromTarball(p) {
+ const installFolder = getInstallFolder();
+ const archiveEntries = /* @__PURE__ */ new Map();
+ const { default: tar } = await Promise.resolve().then(() => __toESM(require_tar()));
+ let hasShortEntries = false;
+ await tar.t({ file: p, onentry: (entry) => {
+ const segments = entry.path.split(/\//g);
+ if (segments.length > 0 && segments[segments.length - 1] !== `.corepack`)
+ return;
+ if (segments.length < 3) {
+ hasShortEntries = true;
+ } else {
+ let references = archiveEntries.get(segments[0]);
+ if (typeof references === `undefined`)
+ archiveEntries.set(segments[0], references = /* @__PURE__ */ new Set());
+ references.add(segments[1]);
+ }
+ } });
+ if (hasShortEntries || archiveEntries.size < 1)
+ throw new UsageError(`Invalid archive format; did it get generated by 'corepack pack'?`);
+ for (const [name, references] of archiveEntries) {
+ for (const reference of references) {
+ if (!isSupportedPackageManager(name))
+ throw new UsageError(`Unsupported package manager '${name}'`);
+ this.log({ name, reference });
+ await import_fs8.default.promises.mkdir(installFolder, { recursive: true });
+ await tar.x({ file: p, cwd: installFolder }, [`${name}/${reference}`]);
+ if (!this.cacheOnly) {
+ await this.context.engine.activatePackageManager({ name, reference });
+ }
+ }
+ }
+ }
+};
+InstallGlobalCommand.paths = [
+ [`install`]
+];
+InstallGlobalCommand.usage = Command.Usage({
+ description: `Install package managers on the system`,
+ details: `
+ Download the selected package managers and install them on the system.
+
+ Package managers thus installed will be configured as the new default when calling their respective binaries outside of projects defining the 'packageManager' field.
+ `,
+ examples: [[
+ `Install the latest version of Yarn 1.x and make it globally available`,
+ `corepack install -g yarn@^1`
+ ], [
+ `Install the latest version of all available package managers, and make them globally available`,
+ `corepack install -g --all`
+ ]]
+});
+
+// sources/commands/InstallLocal.ts
+var InstallLocalCommand = class extends BaseCommand {
+ async execute() {
+ const [descriptor] = await this.resolvePatternsToDescriptors({
+ all: false,
+ patterns: []
+ });
+ const resolved = await this.context.engine.resolveDescriptor(descriptor, { allowTags: true });
+ if (resolved === null)
+ throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
+ this.context.stdout.write(`Adding ${resolved.name}@${resolved.reference} to the cache...
+`);
+ await this.context.engine.ensurePackageManager(resolved);
+ }
+};
+InstallLocalCommand.paths = [
+ [`install`]
+];
+InstallLocalCommand.usage = Command.Usage({
+ description: `Install the package manager configured in the local project`,
+ details: `
+ Download and install the package manager configured in the local project. This command doesn't change the global version used when running the package manager from outside the project (use the \`-g,--global\` flag if you wish to do this).
+ `,
+ examples: [[
+ `Install the project's package manager in the cache`,
+ `corepack install`
+ ]]
+});
+
+// sources/commands/Pack.ts
+var import_promises2 = require("fs/promises");
+var import_path8 = __toESM(require("path"));
+var PackCommand = class extends BaseCommand {
+ constructor() {
+ super(...arguments);
+ this.all = options_exports.Boolean(`--all`, false, {
+ description: `If true, all available default package managers will be installed`
+ });
+ this.json = options_exports.Boolean(`--json`, false, {
+ description: `If true, the path to the generated tarball will be printed on stdout`
+ });
+ this.output = options_exports.String(`-o,--output`, {
+ description: `Where the tarball should be generated; by default "corepack.tgz"`
+ });
+ this.patterns = options_exports.Rest();
+ }
+ async execute() {
+ const descriptors = await this.resolvePatternsToDescriptors({
+ all: this.all,
+ patterns: this.patterns
+ });
+ const installLocations = [];
+ for (const descriptor of descriptors) {
+ const resolved = await this.context.engine.resolveDescriptor(descriptor, { allowTags: true, useCache: false });
+ if (resolved === null)
+ throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
+ this.context.stdout.write(`Adding ${resolved.name}@${resolved.reference} to the cache...
+`);
+ const packageManagerInfo = await this.context.engine.ensurePackageManager(resolved);
+ await this.context.engine.activatePackageManager(packageManagerInfo.locator);
+ installLocations.push(packageManagerInfo.location);
+ }
+ const baseInstallFolder = getInstallFolder();
+ const outputPath = import_path8.default.resolve(this.context.cwd, this.output ?? `corepack.tgz`);
+ if (!this.json) {
+ this.context.stdout.write(`
+`);
+ this.context.stdout.write(`Packing the selected tools in ${import_path8.default.basename(outputPath)}...
+`);
+ }
+ const { default: tar } = await Promise.resolve().then(() => __toESM(require_tar()));
+ await (0, import_promises2.mkdir)(baseInstallFolder, { recursive: true });
+ await tar.c({ gzip: true, cwd: baseInstallFolder, file: import_path8.default.resolve(outputPath) }, installLocations.map((location) => {
+ return import_path8.default.relative(baseInstallFolder, location);
+ }));
+ if (this.json) {
+ this.context.stdout.write(`${JSON.stringify(outputPath)}
+`);
+ } else {
+ this.context.stdout.write(`All done!
+`);
+ }
+ }
+};
+PackCommand.paths = [
+ [`pack`]
+];
+PackCommand.usage = Command.Usage({
+ description: `Store package managers in a tarball`,
+ details: `
+ Download the selected package managers and store them inside a tarball suitable for use with \`corepack install -g\`.
+ `,
+ examples: [[
+ `Pack the package manager defined in the package.json file`,
+ `corepack pack`
+ ], [
+ `Pack the latest version of Yarn 1.x inside a file named corepack.tgz`,
+ `corepack pack yarn@^1`
+ ], [
+ `Pack the latest versions of all supported package managers inside a file named everything.tgz`,
+ `corepack pack --all -o everything.tgz`
+ ]]
+});
+
+// sources/commands/Up.ts
+var import_semver5 = __toESM(require_semver2());
+var UpCommand = class extends BaseCommand {
+ async execute() {
+ const [descriptor] = await this.resolvePatternsToDescriptors({
+ all: false,
+ patterns: []
+ });
+ if (!import_semver5.default.valid(descriptor.range) && !import_semver5.default.validRange(descriptor.range))
+ throw new UsageError(`The 'corepack up' command can only be used when your project's packageManager field is set to a semver version or semver range`);
+ const resolved = await this.context.engine.resolveDescriptor(descriptor, { useCache: false });
+ if (!resolved)
+ throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
+ const majorVersion = import_semver5.default.major(resolved?.reference);
+ const majorDescriptor = { name: descriptor.name, range: `^${majorVersion}.0.0` };
+ const highestVersion = await this.context.engine.resolveDescriptor(majorDescriptor, { useCache: false });
+ if (!highestVersion)
+ throw new UsageError(`Failed to find the highest release for ${descriptor.name} ${majorVersion}.x`);
+ this.context.stdout.write(`Installing ${highestVersion.name}@${highestVersion.reference} in the project...
+`);
+ const packageManagerInfo = await this.context.engine.ensurePackageManager(highestVersion);
+ await this.setLocalPackageManager(packageManagerInfo);
+ }
+};
+UpCommand.paths = [
+ [`up`]
+];
+UpCommand.usage = Command.Usage({
+ description: `Update the package manager used in the current project`,
+ details: `
+ Retrieve the latest available version for the current major release line
+ of the package manager used in the local project, and update the project
+ to use it.
+
+ Unlike \`corepack use\` this command doesn't take a package manager name
+ nor a version range, as it will always select the latest available
+ version from the same major line. Should you need to upgrade to a new
+ major, use an explicit \`corepack use '{name}@*'\` call.
+ `,
+ examples: [[
+ `Configure the project to use the latest Yarn release`,
+ `corepack up`
+ ]]
+});
+
+// sources/commands/Use.ts
+var UseCommand = class extends BaseCommand {
+ constructor() {
+ super(...arguments);
+ this.pattern = options_exports.String();
+ }
+ async execute() {
+ const [descriptor] = await this.resolvePatternsToDescriptors({
+ all: false,
+ patterns: [this.pattern]
+ });
+ const resolved = await this.context.engine.resolveDescriptor(descriptor, { allowTags: true, useCache: false });
+ if (resolved === null)
+ throw new UsageError(`Failed to successfully resolve '${descriptor.range}' to a valid ${descriptor.name} release`);
+ this.context.stdout.write(`Installing ${resolved.name}@${resolved.reference} in the project...
+`);
+ const packageManagerInfo = await this.context.engine.ensurePackageManager(resolved);
+ await this.setLocalPackageManager(packageManagerInfo);
+ }
+};
+UseCommand.paths = [
+ [`use`]
+];
+UseCommand.usage = Command.Usage({
+ description: `Define the package manager to use for the current project`,
+ details: `
+ When run, this command will retrieve the latest release matching the
+ provided descriptor, assign it to the project's package.json file, and
+ automatically perform an install.
+ `,
+ examples: [[
+ `Configure the project to use the latest Yarn release`,
+ `corepack use 'yarn@*'`
+ ]]
+});
+
+// sources/commands/deprecated/Hydrate.ts
+var import_promises3 = require("fs/promises");
+var import_path9 = __toESM(require("path"));
+var HydrateCommand = class extends Command {
+ constructor() {
+ super(...arguments);
+ this.activate = options_exports.Boolean(`--activate`, false, {
+ description: `If true, this release will become the default one for this package manager`
+ });
+ this.fileName = options_exports.String();
+ }
+ async execute() {
+ const installFolder = getInstallFolder();
+ const fileName = import_path9.default.resolve(this.context.cwd, this.fileName);
+ const archiveEntries = /* @__PURE__ */ new Map();
+ let hasShortEntries = false;
+ const { default: tar } = await Promise.resolve().then(() => __toESM(require_tar()));
+ await tar.t({ file: fileName, onentry: (entry) => {
+ const segments = entry.path.split(/\//g);
+ if (segments.length < 3) {
+ hasShortEntries = true;
+ } else {
+ let references = archiveEntries.get(segments[0]);
+ if (typeof references === `undefined`)
+ archiveEntries.set(segments[0], references = /* @__PURE__ */ new Set());
+ references.add(segments[1]);
+ }
+ } });
+ if (hasShortEntries || archiveEntries.size < 1)
+ throw new UsageError(`Invalid archive format; did it get generated by 'corepack prepare'?`);
+ for (const [name, references] of archiveEntries) {
+ for (const reference of references) {
+ if (!isSupportedPackageManager(name))
+ throw new UsageError(`Unsupported package manager '${name}'`);
+ if (this.activate)
+ this.context.stdout.write(`Hydrating ${name}@${reference} for immediate activation...
+`);
+ else
+ this.context.stdout.write(`Hydrating ${name}@${reference}...
+`);
+ await (0, import_promises3.mkdir)(installFolder, { recursive: true });
+ await tar.x({ file: fileName, cwd: installFolder }, [`${name}/${reference}`]);
+ if (this.activate) {
+ await this.context.engine.activatePackageManager({ name, reference });
+ }
+ }
+ }
+ this.context.stdout.write(`All done!
+`);
+ }
+};
+HydrateCommand.paths = [
+ [`hydrate`]
+];
+
+// sources/commands/deprecated/Prepare.ts
+var import_promises4 = require("fs/promises");
+var import_path10 = __toESM(require("path"));
var PrepareCommand = class extends Command {
constructor() {
super(...arguments);
@@ -39642,14 +40119,14 @@ var PrepareCommand = class extends Command {
if (this.output) {
const outputName = typeof this.output === `string` ? this.output : `corepack.tgz`;
const baseInstallFolder = getInstallFolder();
- const outputPath = import_path8.default.resolve(this.context.cwd, outputName);
+ const outputPath = import_path10.default.resolve(this.context.cwd, outputName);
if (!this.json)
- this.context.stdout.write(`Packing the selected tools in ${import_path8.default.basename(outputPath)}...
+ this.context.stdout.write(`Packing the selected tools in ${import_path10.default.basename(outputPath)}...
`);
const { default: tar } = await Promise.resolve().then(() => __toESM(require_tar()));
- await (0, import_promises3.mkdir)(baseInstallFolder, { recursive: true });
- await tar.c({ gzip: true, cwd: baseInstallFolder, file: import_path8.default.resolve(outputPath) }, installLocations.map((location) => {
- return import_path8.default.relative(baseInstallFolder, location);
+ await (0, import_promises4.mkdir)(baseInstallFolder, { recursive: true });
+ await tar.c({ gzip: true, cwd: baseInstallFolder, file: import_path10.default.resolve(outputPath) }, installLocations.map((location) => {
+ return import_path10.default.relative(baseInstallFolder, location);
}));
if (this.json) {
this.context.stdout.write(`${JSON.stringify(outputPath)}
@@ -39664,30 +40141,6 @@ var PrepareCommand = class extends Command {
PrepareCommand.paths = [
[`prepare`]
];
-PrepareCommand.usage = Command.Usage({
- description: `Generate a package manager archive`,
- details: `
- This command makes sure that the specified package managers are installed in the local cache. Calling this command explicitly unless you operate in an environment without network access (in which case you'd have to call \`prepare\` while building your image, to make sure all tools are available for later use).
-
- When the \`-o,--output\` flag is set, Corepack will also compress the resulting package manager into a format suitable for \`corepack hydrate\`, and will store it at the specified location on the disk.
- `,
- examples: [[
- `Prepare the package manager from the active project`,
- `$0 prepare`
- ], [
- `Prepare a specific Yarn version`,
- `$0 prepare yarn@2.2.2`
- ], [
- `Prepare the latest available pnpm version`,
- `$0 prepare pnpm@latest --activate`
- ], [
- `Generate an archive for a specific Yarn version`,
- `$0 prepare yarn@2.2.2 -o`
- ], [
- `Generate a named archive`,
- `$0 prepare yarn@2.2.2 --output=yarn.tgz`
- ]]
-});
// sources/miscUtils.ts
var Cancellation = class extends Error {
@@ -39754,6 +40207,7 @@ async function runMain(argv) {
};
const [firstArg, ...restArgs] = argv;
const request = getPackageManagerRequestFromCli(firstArg, context);
+ let code;
if (!request) {
const cli = new Cli({
binaryLabel: `Corepack`,
@@ -39762,11 +40216,16 @@ async function runMain(argv) {
});
cli.register(builtins_exports.HelpCommand);
cli.register(builtins_exports.VersionCommand);
- cli.register(EnableCommand);
cli.register(DisableCommand);
+ cli.register(EnableCommand);
+ cli.register(InstallGlobalCommand);
+ cli.register(InstallLocalCommand);
+ cli.register(PackCommand);
+ cli.register(UpCommand);
+ cli.register(UseCommand);
cli.register(HydrateCommand);
cli.register(PrepareCommand);
- await cli.runExit(argv, context);
+ code = await cli.run(argv, context);
} else {
const cli = new Cli({
binaryLabel: `'${request.binaryName}', via Corepack`,
@@ -39782,10 +40241,10 @@ async function runMain(argv) {
return executePackageManagerRequest(request, this.proxy, this.context);
}
});
- const code = await cli.run(restArgs, context);
- if (code !== 0) {
- process.exitCode ??= code;
- }
+ code = await cli.run(restArgs, context);
+ }
+ if (code !== 0) {
+ process.exitCode ??= code;
}
}
// Annotate the CommonJS export names for ESM import in node:
diff --git a/deps/corepack/package.json b/deps/corepack/package.json
index 5a9940b50e3e22..dc168d9389b1aa 100644
--- a/deps/corepack/package.json
+++ b/deps/corepack/package.json
@@ -1,6 +1,6 @@
{
"name": "corepack",
- "version": "0.19.0",
+ "version": "0.20.0",
"homepage": "https://github.com/nodejs/corepack#readme",
"bugs": {
"url": "https://github.com/nodejs/corepack/issues"
@@ -24,14 +24,14 @@
"@jest/globals": "^29.0.0",
"@types/debug": "^4.1.5",
"@types/jest": "^29.0.0",
- "@types/node": "^20.0.0",
+ "@types/node": "^20.4.6",
"@types/semver": "^7.1.0",
"@types/tar": "^6.0.0",
"@types/which": "^3.0.0",
"@typescript-eslint/eslint-plugin": "^5.0.0",
"@typescript-eslint/parser": "^5.0.0",
"@yarnpkg/eslint-config": "^0.6.0-rc.7",
- "@yarnpkg/fslib": "^2.1.0",
+ "@yarnpkg/fslib": "^3.0.0-rc.48",
"@zkochan/cmd-shim": "^6.0.0",
"babel-plugin-dynamic-import-node": "^2.3.3",
"clipanion": "^3.0.1",
@@ -41,8 +41,8 @@
"eslint-plugin-arca": "^0.15.0",
"jest": "^29.0.0",
"nock": "^13.0.4",
- "proxy-agent": "^6.0.0",
- "semver": "^7.1.3",
+ "proxy-agent": "^6.2.2",
+ "semver": "^7.5.2",
"supports-color": "^9.0.0",
"tar": "^6.0.1",
"ts-node": "^10.0.0",
@@ -96,7 +96,7 @@
]
},
"resolutions": {
- "vm2": "patch:vm2@npm:3.9.9#.yarn/patches/vm2-npm-3.9.9-03fd1f4dc5.patch"
+ "vm2": "portal:./vm2"
},
"bin": {
"corepack": "./dist/corepack.js",
diff --git a/deps/googletest/include/gtest/gtest-printers.h b/deps/googletest/include/gtest/gtest-printers.h
index d1766e640f280b..9ccbff7da454b5 100644
--- a/deps/googletest/include/gtest/gtest-printers.h
+++ b/deps/googletest/include/gtest/gtest-printers.h
@@ -122,6 +122,10 @@
#include "gtest/internal/gtest-internal.h"
#include "gtest/internal/gtest-port.h"
+#if GTEST_INTERNAL_HAS_STD_SPAN
+#include // NOLINT
+#endif // GTEST_INTERNAL_HAS_STD_SPAN
+
namespace testing {
// Definitions in the internal* namespaces are subject to change without notice.
@@ -131,13 +135,32 @@ namespace internal {
template
void UniversalPrint(const T& value, ::std::ostream* os);
+template
+struct IsStdSpan {
+ static constexpr bool value = false;
+};
+
+#if GTEST_INTERNAL_HAS_STD_SPAN
+template
+struct IsStdSpan> {
+ static constexpr bool value = true;
+};
+#endif // GTEST_INTERNAL_HAS_STD_SPAN
+
// Used to print an STL-style container when the user doesn't define
// a PrintTo() for it.
+//
+// NOTE: Since std::span does not have const_iterator until C++23, it would
+// fail IsContainerTest before C++23. However, IsContainerTest only uses
+// the presence of const_iterator to avoid treating iterators as containers
+// because of iterator::iterator. Which means std::span satisfies the *intended*
+// condition of IsContainerTest.
struct ContainerPrinter {
template (0)) == sizeof(IsContainer)) &&
- !IsRecursiveContainer::value>::type>
+ ((sizeof(IsContainerTest(0)) == sizeof(IsContainer)) &&
+ !IsRecursiveContainer::value) ||
+ IsStdSpan::value>::type>
static void PrintValue(const T& container, std::ostream* os) {
const size_t kMaxCount = 32; // The maximum number of elements to print.
*os << '{';
diff --git a/deps/googletest/include/gtest/internal/gtest-internal.h b/deps/googletest/include/gtest/internal/gtest-internal.h
index 97a983393a34ff..4f077fcfbe2f03 100644
--- a/deps/googletest/include/gtest/internal/gtest-internal.h
+++ b/deps/googletest/include/gtest/internal/gtest-internal.h
@@ -78,7 +78,7 @@
//
// will result in the token foo__LINE__, instead of foo followed by
// the current line number. For more details, see
-// http://www.parashift.com/c++-faq-lite/misc-technical-issues.html#faq-39.6
+// https://www.parashift.com/c++-faq-lite/misc-technical-issues.html#faq-39.6
#define GTEST_CONCAT_TOKEN_(foo, bar) GTEST_CONCAT_TOKEN_IMPL_(foo, bar)
#define GTEST_CONCAT_TOKEN_IMPL_(foo, bar) foo##bar
@@ -169,7 +169,7 @@ namespace edit_distance {
// All edits cost the same, with replace having lower priority than
// add/remove.
// Simple implementation of the Wagner-Fischer algorithm.
-// See http://en.wikipedia.org/wiki/Wagner-Fischer_algorithm
+// See https://en.wikipedia.org/wiki/Wagner-Fischer_algorithm
enum EditType { kMatch, kAdd, kRemove, kReplace };
GTEST_API_ std::vector CalculateOptimalEdits(
const std::vector& left, const std::vector& right);
@@ -236,7 +236,7 @@ GTEST_API_ std::string GetBoolAssertionFailureMessage(
// For double, there are 11 exponent bits and 52 fraction bits.
//
// More details can be found at
-// http://en.wikipedia.org/wiki/IEEE_floating-point_standard.
+// https://en.wikipedia.org/wiki/IEEE_floating-point_standard.
//
// Template parameter:
//
@@ -281,7 +281,7 @@ class FloatingPoint {
// bits. Therefore, 4 should be enough for ordinary use.
//
// See the following article for more details on ULP:
- // http://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/
+ // https://randomascii.wordpress.com/2012/02/25/comparing-floating-point-numbers-2012-edition/
static const uint32_t kMaxUlps = 4;
// Constructs a FloatingPoint from a raw floating-point number.
@@ -362,7 +362,7 @@ class FloatingPoint {
// N - 1 (the biggest number representable using
// sign-and-magnitude) is represented by 2N - 1.
//
- // Read http://en.wikipedia.org/wiki/Signed_number_representations
+ // Read https://en.wikipedia.org/wiki/Signed_number_representations
// for more details on signed number representations.
static Bits SignAndMagnitudeToBiased(const Bits& sam) {
if (kSignBitMask & sam) {
diff --git a/deps/googletest/include/gtest/internal/gtest-param-util.h b/deps/googletest/include/gtest/internal/gtest-param-util.h
index 6a81c37fa6afc5..dd39e98a16c899 100644
--- a/deps/googletest/include/gtest/internal/gtest-param-util.h
+++ b/deps/googletest/include/gtest/internal/gtest-param-util.h
@@ -584,7 +584,9 @@ class ParameterizedTestSuiteInfo : public ParameterizedTestSuiteInfoBase {
GTEST_CHECK_(IsValidParamName(param_name))
<< "Parameterized test name '" << param_name
- << "' is invalid, in " << file << " line " << line << std::endl;
+ << "' is invalid (contains spaces, dashes, underscores, or "
+ "non-alphanumeric characters), in "
+ << file << " line " << line << "" << std::endl;
GTEST_CHECK_(test_param_names.count(param_name) == 0)
<< "Duplicate parameterized test name '" << param_name << "', in "
diff --git a/deps/googletest/include/gtest/internal/gtest-port.h b/deps/googletest/include/gtest/internal/gtest-port.h
index b887e24edcd05e..35544a08a65aae 100644
--- a/deps/googletest/include/gtest/internal/gtest-port.h
+++ b/deps/googletest/include/gtest/internal/gtest-port.h
@@ -208,6 +208,8 @@
// or
// UniversalPrinter
// specializations. Always defined to 0 or 1.
+// GTEST_INTERNAL_HAS_STD_SPAN - for enabling UniversalPrinter
+// specializations. Always defined to 0 or 1
// GTEST_INTERNAL_HAS_STRING_VIEW - for enabling Matcher or
// Matcher
// specializations. Always defined to 0 or 1.
@@ -609,7 +611,7 @@ typedef struct _RTL_CRITICAL_SECTION GTEST_CRITICAL_SECTION;
// Determines whether clone(2) is supported.
// Usually it will only be available on Linux, excluding
// Linux on the Itanium architecture.
-// Also see http://linux.die.net/man/2/clone.
+// Also see https://linux.die.net/man/2/clone.
#ifndef GTEST_HAS_CLONE
// The user didn't tell us, so we need to figure it out.
@@ -2407,6 +2409,16 @@ inline ::std::nullopt_t Nullopt() { return ::std::nullopt; }
#define GTEST_INTERNAL_HAS_OPTIONAL 0
#endif
+#ifdef __has_include
+#if __has_include() && GTEST_INTERNAL_CPLUSPLUS_LANG >= 202002L
+#define GTEST_INTERNAL_HAS_STD_SPAN 1
+#endif // __has_include() && GTEST_INTERNAL_CPLUSPLUS_LANG >= 202002L
+#endif // __has_include
+
+#ifndef GTEST_INTERNAL_HAS_STD_SPAN
+#define GTEST_INTERNAL_HAS_STD_SPAN 0
+#endif
+
#ifdef GTEST_HAS_ABSL
// Always use absl::string_view for Matcher<> specializations if googletest
// is built with absl support.
diff --git a/deps/googletest/src/gtest-death-test.cc b/deps/googletest/src/gtest-death-test.cc
index 0eb6e38b2a9664..8417a300f7c73d 100644
--- a/deps/googletest/src/gtest-death-test.cc
+++ b/deps/googletest/src/gtest-death-test.cc
@@ -783,7 +783,7 @@ DeathTest::TestRole WindowsDeathTest::AssumeRole() {
StreamableToString(static_cast(::GetCurrentProcessId())) +
// size_t has the same width as pointers on both 32-bit and 64-bit
// Windows platforms.
- // See http://msdn.microsoft.com/en-us/library/tcxf1dw6.aspx.
+ // See https://msdn.microsoft.com/en-us/library/tcxf1dw6.aspx.
"|" + StreamableToString(reinterpret_cast(write_handle)) + "|" +
StreamableToString(reinterpret_cast(event_handle_.Get()));
diff --git a/deps/googletest/src/gtest-internal-inl.h b/deps/googletest/src/gtest-internal-inl.h
index 5b7fcbd065091c..4799a1e7ba5d09 100644
--- a/deps/googletest/src/gtest-internal-inl.h
+++ b/deps/googletest/src/gtest-internal-inl.h
@@ -312,7 +312,7 @@ void ShuffleRange(internal::Random* random, int begin, int end,
<< begin << ", " << size << "].";
// Fisher-Yates shuffle, from
- // http://en.wikipedia.org/wiki/Fisher-Yates_shuffle
+ // https://en.wikipedia.org/wiki/Fisher-Yates_shuffle
for (int range_width = end - begin; range_width >= 2; range_width--) {
const int last_in_range = begin + range_width - 1;
const int selected =
diff --git a/deps/googletest/src/gtest-port.cc b/deps/googletest/src/gtest-port.cc
index 2aaf2bcc26c1e6..e9d12d92e7cb5a 100644
--- a/deps/googletest/src/gtest-port.cc
+++ b/deps/googletest/src/gtest-port.cc
@@ -158,13 +158,13 @@ size_t GetThreadCount() {
// we cannot detect it.
size_t GetThreadCount() {
int mib[] = {
- CTL_KERN,
- KERN_PROC,
- KERN_PROC_PID,
- getpid(),
+ CTL_KERN,
+ KERN_PROC,
+ KERN_PROC_PID,
+ getpid(),
#ifdef GTEST_OS_NETBSD
- sizeof(struct kinfo_proc),
- 1,
+ sizeof(struct kinfo_proc),
+ 1,
#endif
};
u_int miblen = sizeof(mib) / sizeof(mib[0]);
@@ -1028,6 +1028,16 @@ GTEST_DISABLE_MSC_DEPRECATED_PUSH_()
#if GTEST_HAS_STREAM_REDIRECTION
+namespace {
+
+#if defined(GTEST_OS_LINUX_ANDROID) || defined(GTEST_OS_IOS)
+bool EndsWithPathSeparator(const std::string& path) {
+ return !path.empty() && path.back() == GTEST_PATH_SEP_[0];
+}
+#endif
+
+} // namespace
+
// Object that captures an output stream (stdout/stderr).
class CapturedStream {
public:
@@ -1064,7 +1074,13 @@ class CapturedStream {
// The location /data/local/tmp is directly accessible from native code.
// '/sdcard' and other variants cannot be relied on, as they are not
// guaranteed to be mounted, or may have a delay in mounting.
- name_template = "/data/local/tmp/";
+ //
+ // However, prefer using the TMPDIR environment variable if set, as newer
+ // devices may have /data/local/tmp read-only.
+ name_template = TempDir();
+ if (!EndsWithPathSeparator(name_template))
+ name_template.push_back(GTEST_PATH_SEP_[0]);
+
#elif defined(GTEST_OS_IOS)
char user_temp_dir[PATH_MAX + 1];
@@ -1084,7 +1100,7 @@ class CapturedStream {
::confstr(_CS_DARWIN_USER_TEMP_DIR, user_temp_dir, sizeof(user_temp_dir));
name_template = user_temp_dir;
- if (name_template.back() != GTEST_PATH_SEP_[0])
+ if (!EndsWithPathSeparator(name_template))
name_template.push_back(GTEST_PATH_SEP_[0]);
#else
name_template = "/tmp/";
diff --git a/deps/googletest/src/gtest.cc b/deps/googletest/src/gtest.cc
index 30a5cc3f83a7e0..62dfef664e6836 100644
--- a/deps/googletest/src/gtest.cc
+++ b/deps/googletest/src/gtest.cc
@@ -879,7 +879,7 @@ int UnitTestOptions::GTestProcessSEH(DWORD seh_code, const char* location) {
// apparently).
//
// SEH exception code for C++ exceptions.
- // (see http://support.microsoft.com/kb/185294 for more information).
+ // (see https://support.microsoft.com/kb/185294 for more information).
const DWORD kCxxExceptionCode = 0xe06d7363;
if (!GTEST_FLAG_GET(catch_exceptions) || seh_code == kCxxExceptionCode ||
@@ -3228,7 +3228,8 @@ static const char* GetAnsiColorCode(GTestColor color) {
case GTestColor::kYellow:
return "3";
default:
- return nullptr;
+ assert(false);
+ return "9";
}
}
diff --git a/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h b/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
index fba310c7888d0c..7f5251a6243e41 100644
--- a/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
+++ b/deps/nghttp2/lib/includes/nghttp2/nghttp2ver.h
@@ -29,7 +29,7 @@
* @macro
* Version number of the nghttp2 library release
*/
-#define NGHTTP2_VERSION "1.55.1"
+#define NGHTTP2_VERSION "1.56.0"
/**
* @macro
@@ -37,6 +37,6 @@
* release. This is a 24 bit number with 8 bits for major number, 8 bits
* for minor and 8 bits for patch. Version 1.2.3 becomes 0x010203.
*/
-#define NGHTTP2_VERSION_NUM 0x013701
+#define NGHTTP2_VERSION_NUM 0x013800
#endif /* NGHTTP2VER_H */
diff --git a/deps/nghttp2/lib/nghttp2_frame.c b/deps/nghttp2/lib/nghttp2_frame.c
index 35072c15fc18e6..77cb463df5441f 100644
--- a/deps/nghttp2/lib/nghttp2_frame.c
+++ b/deps/nghttp2/lib/nghttp2_frame.c
@@ -418,8 +418,8 @@ void nghttp2_frame_unpack_priority_spec(nghttp2_priority_spec *pri_spec,
nghttp2_priority_spec_init(pri_spec, dep_stream_id, weight, exclusive);
}
-int nghttp2_frame_unpack_headers_payload(nghttp2_headers *frame,
- const uint8_t *payload) {
+void nghttp2_frame_unpack_headers_payload(nghttp2_headers *frame,
+ const uint8_t *payload) {
if (frame->hd.flags & NGHTTP2_FLAG_PRIORITY) {
nghttp2_frame_unpack_priority_spec(&frame->pri_spec, payload);
} else {
@@ -428,11 +428,9 @@ int nghttp2_frame_unpack_headers_payload(nghttp2_headers *frame,
frame->nva = NULL;
frame->nvlen = 0;
-
- return 0;
}
-int nghttp2_frame_pack_priority(nghttp2_bufs *bufs, nghttp2_priority *frame) {
+void nghttp2_frame_pack_priority(nghttp2_bufs *bufs, nghttp2_priority *frame) {
nghttp2_buf *buf;
assert(bufs->head == bufs->cur);
@@ -448,8 +446,6 @@ int nghttp2_frame_pack_priority(nghttp2_bufs *bufs, nghttp2_priority *frame) {
nghttp2_frame_pack_priority_spec(buf->last, &frame->pri_spec);
buf->last += NGHTTP2_PRIORITY_SPECLEN;
-
- return 0;
}
void nghttp2_frame_unpack_priority_payload(nghttp2_priority *frame,
@@ -457,8 +453,8 @@ void nghttp2_frame_unpack_priority_payload(nghttp2_priority *frame,
nghttp2_frame_unpack_priority_spec(&frame->pri_spec, payload);
}
-int nghttp2_frame_pack_rst_stream(nghttp2_bufs *bufs,
- nghttp2_rst_stream *frame) {
+void nghttp2_frame_pack_rst_stream(nghttp2_bufs *bufs,
+ nghttp2_rst_stream *frame) {
nghttp2_buf *buf;
assert(bufs->head == bufs->cur);
@@ -473,8 +469,6 @@ int nghttp2_frame_pack_rst_stream(nghttp2_bufs *bufs,
nghttp2_put_uint32be(buf->last, frame->error_code);
buf->last += 4;
-
- return 0;
}
void nghttp2_frame_unpack_rst_stream_payload(nghttp2_rst_stream *frame,
@@ -592,16 +586,15 @@ int nghttp2_frame_pack_push_promise(nghttp2_bufs *bufs,
return frame_pack_headers_shared(bufs, &frame->hd);
}
-int nghttp2_frame_unpack_push_promise_payload(nghttp2_push_promise *frame,
- const uint8_t *payload) {
+void nghttp2_frame_unpack_push_promise_payload(nghttp2_push_promise *frame,
+ const uint8_t *payload) {
frame->promised_stream_id =
nghttp2_get_uint32(payload) & NGHTTP2_STREAM_ID_MASK;
frame->nva = NULL;
frame->nvlen = 0;
- return 0;
}
-int nghttp2_frame_pack_ping(nghttp2_bufs *bufs, nghttp2_ping *frame) {
+void nghttp2_frame_pack_ping(nghttp2_bufs *bufs, nghttp2_ping *frame) {
nghttp2_buf *buf;
assert(bufs->head == bufs->cur);
@@ -616,8 +609,6 @@ int nghttp2_frame_pack_ping(nghttp2_bufs *bufs, nghttp2_ping *frame) {
buf->last =
nghttp2_cpymem(buf->last, frame->opaque_data, sizeof(frame->opaque_data));
-
- return 0;
}
void nghttp2_frame_unpack_ping_payload(nghttp2_ping *frame,
@@ -697,8 +688,8 @@ int nghttp2_frame_unpack_goaway_payload2(nghttp2_goaway *frame,
return 0;
}
-int nghttp2_frame_pack_window_update(nghttp2_bufs *bufs,
- nghttp2_window_update *frame) {
+void nghttp2_frame_pack_window_update(nghttp2_bufs *bufs,
+ nghttp2_window_update *frame) {
nghttp2_buf *buf;
assert(bufs->head == bufs->cur);
@@ -713,8 +704,6 @@ int nghttp2_frame_pack_window_update(nghttp2_bufs *bufs,
nghttp2_put_uint32be(buf->last, (uint32_t)frame->window_size_increment);
buf->last += 4;
-
- return 0;
}
void nghttp2_frame_unpack_window_update_payload(nghttp2_window_update *frame,
@@ -723,7 +712,7 @@ void nghttp2_frame_unpack_window_update_payload(nghttp2_window_update *frame,
nghttp2_get_uint32(payload) & NGHTTP2_WINDOW_SIZE_INCREMENT_MASK;
}
-int nghttp2_frame_pack_altsvc(nghttp2_bufs *bufs, nghttp2_extension *frame) {
+void nghttp2_frame_pack_altsvc(nghttp2_bufs *bufs, nghttp2_extension *frame) {
int rv;
nghttp2_buf *buf;
nghttp2_ext_altsvc *altsvc;
@@ -752,8 +741,6 @@ int nghttp2_frame_pack_altsvc(nghttp2_bufs *bufs, nghttp2_extension *frame) {
rv = nghttp2_bufs_add(bufs, altsvc->field_value, altsvc->field_value_len);
assert(rv == 0);
-
- return 0;
}
void nghttp2_frame_unpack_altsvc_payload(nghttp2_extension *frame,
@@ -901,8 +888,8 @@ int nghttp2_frame_unpack_origin_payload(nghttp2_extension *frame,
return 0;
}
-int nghttp2_frame_pack_priority_update(nghttp2_bufs *bufs,
- nghttp2_extension *frame) {
+void nghttp2_frame_pack_priority_update(nghttp2_bufs *bufs,
+ nghttp2_extension *frame) {
int rv;
nghttp2_buf *buf;
nghttp2_ext_priority_update *priority_update;
@@ -927,8 +914,6 @@ int nghttp2_frame_pack_priority_update(nghttp2_bufs *bufs,
priority_update->field_value_len);
assert(rv == 0);
-
- return 0;
}
void nghttp2_frame_unpack_priority_update_payload(nghttp2_extension *frame,
@@ -1186,14 +1171,14 @@ static void frame_set_pad(nghttp2_buf *buf, size_t padlen, int framehd_only) {
buf->last += trail_padlen;
}
-int nghttp2_frame_add_pad(nghttp2_bufs *bufs, nghttp2_frame_hd *hd,
- size_t padlen, int framehd_only) {
+void nghttp2_frame_add_pad(nghttp2_bufs *bufs, nghttp2_frame_hd *hd,
+ size_t padlen, int framehd_only) {
nghttp2_buf *buf;
if (padlen == 0) {
DEBUGF("send: padlen = 0, nothing to do\n");
- return 0;
+ return;
}
/*
@@ -1226,6 +1211,4 @@ int nghttp2_frame_add_pad(nghttp2_bufs *bufs, nghttp2_frame_hd *hd,
hd->flags |= NGHTTP2_FLAG_PADDED;
DEBUGF("send: final payloadlen=%zu, padlen=%zu\n", hd->length, padlen);
-
- return 0;
}
diff --git a/deps/nghttp2/lib/nghttp2_frame.h b/deps/nghttp2/lib/nghttp2_frame.h
index 5f6152b74587ae..d58668806c432a 100644
--- a/deps/nghttp2/lib/nghttp2_frame.h
+++ b/deps/nghttp2/lib/nghttp2_frame.h
@@ -143,11 +143,9 @@ int nghttp2_frame_pack_headers(nghttp2_bufs *bufs, nghttp2_headers *frame,
* Unpacks HEADERS frame byte sequence into |frame|. This function
* only unapcks bytes that come before name/value header block and
* after possible Pad Length field.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_unpack_headers_payload(nghttp2_headers *frame,
- const uint8_t *payload);
+void nghttp2_frame_unpack_headers_payload(nghttp2_headers *frame,
+ const uint8_t *payload);
/*
* Packs PRIORITY frame |frame| in wire format and store it in
@@ -155,10 +153,8 @@ int nghttp2_frame_unpack_headers_payload(nghttp2_headers *frame,
*
* The caller must make sure that nghttp2_bufs_reset(bufs) is called
* before calling this function.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_pack_priority(nghttp2_bufs *bufs, nghttp2_priority *frame);
+void nghttp2_frame_pack_priority(nghttp2_bufs *bufs, nghttp2_priority *frame);
/*
* Unpacks PRIORITY wire format into |frame|.
@@ -172,11 +168,9 @@ void nghttp2_frame_unpack_priority_payload(nghttp2_priority *frame,
*
* The caller must make sure that nghttp2_bufs_reset(bufs) is called
* before calling this function.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_pack_rst_stream(nghttp2_bufs *bufs,
- nghttp2_rst_stream *frame);
+void nghttp2_frame_pack_rst_stream(nghttp2_bufs *bufs,
+ nghttp2_rst_stream *frame);
/*
* Unpacks RST_STREAM frame byte sequence into |frame|.
@@ -265,15 +259,9 @@ int nghttp2_frame_pack_push_promise(nghttp2_bufs *bufs,
* Unpacks PUSH_PROMISE frame byte sequence into |frame|. This
* function only unapcks bytes that come before name/value header
* block and after possible Pad Length field.
- *
- * This function returns 0 if it succeeds or one of the following
- * negative error codes:
- *
- * NGHTTP2_ERR_PROTO
- * TODO END_HEADERS flag is not set
*/
-int nghttp2_frame_unpack_push_promise_payload(nghttp2_push_promise *frame,
- const uint8_t *payload);
+void nghttp2_frame_unpack_push_promise_payload(nghttp2_push_promise *frame,
+ const uint8_t *payload);
/*
* Packs PING frame |frame| in wire format and store it in
@@ -281,10 +269,8 @@ int nghttp2_frame_unpack_push_promise_payload(nghttp2_push_promise *frame,
*
* The caller must make sure that nghttp2_bufs_reset(bufs) is called
* before calling this function.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_pack_ping(nghttp2_bufs *bufs, nghttp2_ping *frame);
+void nghttp2_frame_pack_ping(nghttp2_bufs *bufs, nghttp2_ping *frame);
/*
* Unpacks PING wire format into |frame|.
@@ -343,11 +329,9 @@ int nghttp2_frame_unpack_goaway_payload2(nghttp2_goaway *frame,
*
* The caller must make sure that nghttp2_bufs_reset(bufs) is called
* before calling this function.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_pack_window_update(nghttp2_bufs *bufs,
- nghttp2_window_update *frame);
+void nghttp2_frame_pack_window_update(nghttp2_bufs *bufs,
+ nghttp2_window_update *frame);
/*
* Unpacks WINDOW_UPDATE frame byte sequence into |frame|.
@@ -361,17 +345,13 @@ void nghttp2_frame_unpack_window_update_payload(nghttp2_window_update *frame,
*
* The caller must make sure that nghttp2_bufs_reset(bufs) is called
* before calling this function.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_pack_altsvc(nghttp2_bufs *bufs, nghttp2_extension *ext);
+void nghttp2_frame_pack_altsvc(nghttp2_bufs *bufs, nghttp2_extension *ext);
/*
* Unpacks ALTSVC wire format into |frame|. The |payload| of
* |payloadlen| bytes contains frame payload. This function assumes
* that frame->payload points to the nghttp2_ext_altsvc object.
- *
- * This function always succeeds and returns 0.
*/
void nghttp2_frame_unpack_altsvc_payload(nghttp2_extension *frame,
size_t origin_len, uint8_t *payload,
@@ -431,19 +411,15 @@ int nghttp2_frame_unpack_origin_payload(nghttp2_extension *frame,
*
* The caller must make sure that nghttp2_bufs_reset(bufs) is called
* before calling this function.
- *
- * This function always succeeds and returns 0.
*/
-int nghttp2_frame_pack_priority_update(nghttp2_bufs *bufs,
- nghttp2_extension *ext);
+void nghttp2_frame_pack_priority_update(nghttp2_bufs *bufs,
+ nghttp2_extension *ext);
/*
* Unpacks PRIORITY_UPDATE wire format into |frame|. The |payload| of
* |payloadlen| bytes contains frame payload. This function assumes
* that frame->payload points to the nghttp2_ext_priority_update
* object.
- *
- * This function always succeeds and returns 0.
*/
void nghttp2_frame_unpack_priority_update_payload(nghttp2_extension *frame,
uint8_t *payload,
@@ -654,16 +630,8 @@ int nghttp2_iv_check(const nghttp2_settings_entry *iv, size_t niv);
* |padlen| including Pad Length field. The |hd| is the frame header
* for the serialized data. This function fills zeros padding region
* unless framehd_only is nonzero.
- *
- * This function returns 0 if it succeeds, or one of the following
- * negative error codes:
- *
- * NGHTTP2_ERR_NOMEM
- * Out of memory.
- * NGHTTP2_ERR_FRAME_SIZE_ERROR
- * The length of the resulting frame is too large.
*/
-int nghttp2_frame_add_pad(nghttp2_bufs *bufs, nghttp2_frame_hd *hd,
- size_t padlen, int framehd_only);
+void nghttp2_frame_add_pad(nghttp2_bufs *bufs, nghttp2_frame_hd *hd,
+ size_t padlen, int framehd_only);
#endif /* NGHTTP2_FRAME_H */
diff --git a/deps/nghttp2/lib/nghttp2_session.c b/deps/nghttp2/lib/nghttp2_session.c
index 71858a39e07db7..a45cbeef673108 100644
--- a/deps/nghttp2/lib/nghttp2_session.c
+++ b/deps/nghttp2/lib/nghttp2_session.c
@@ -937,8 +937,8 @@ static int session_ob_data_push(nghttp2_session *session,
return 0;
}
-static int session_ob_data_remove(nghttp2_session *session,
- nghttp2_stream *stream) {
+static void session_ob_data_remove(nghttp2_session *session,
+ nghttp2_stream *stream) {
uint32_t urgency;
assert(stream->flags & NGHTTP2_STREAM_FLAG_NO_RFC7540_PRIORITIES);
@@ -951,8 +951,6 @@ static int session_ob_data_remove(nghttp2_session *session,
nghttp2_pq_remove(&session->sched[urgency].ob_data, &stream->pq_entry);
stream->queued = 0;
-
- return 0;
}
static int session_attach_stream_item(nghttp2_session *session,
@@ -972,38 +970,28 @@ static int session_attach_stream_item(nghttp2_session *session,
return session_ob_data_push(session, stream);
}
-static int session_detach_stream_item(nghttp2_session *session,
- nghttp2_stream *stream) {
- int rv;
-
- rv = nghttp2_stream_detach_item(stream);
- if (rv != 0) {
- return rv;
- }
+static void session_detach_stream_item(nghttp2_session *session,
+ nghttp2_stream *stream) {
+ nghttp2_stream_detach_item(stream);
if (!(stream->flags & NGHTTP2_STREAM_FLAG_NO_RFC7540_PRIORITIES) ||
!stream->queued) {
- return 0;
+ return;
}
- return session_ob_data_remove(session, stream);
+ session_ob_data_remove(session, stream);
}
-static int session_defer_stream_item(nghttp2_session *session,
- nghttp2_stream *stream, uint8_t flags) {
- int rv;
-
- rv = nghttp2_stream_defer_item(stream, flags);
- if (rv != 0) {
- return rv;
- }
+static void session_defer_stream_item(nghttp2_session *session,
+ nghttp2_stream *stream, uint8_t flags) {
+ nghttp2_stream_defer_item(stream, flags);
if (!(stream->flags & NGHTTP2_STREAM_FLAG_NO_RFC7540_PRIORITIES) ||
!stream->queued) {
- return 0;
+ return;
}
- return session_ob_data_remove(session, stream);
+ session_ob_data_remove(session, stream);
}
static int session_resume_deferred_stream_item(nghttp2_session *session,
@@ -1476,11 +1464,7 @@ int nghttp2_session_close_stream(nghttp2_session *session, int32_t stream_id,
item = stream->item;
- rv = session_detach_stream_item(session, stream);
-
- if (rv != 0) {
- return rv;
- }
+ session_detach_stream_item(session, stream);
/* If item is queued, it will be deleted when it is popped
(nghttp2_session_prep_frame() will fail). If session->aob.item
@@ -2221,7 +2205,6 @@ static ssize_t session_call_select_padding(nghttp2_session *session,
frame->push_promise has also padlen in the same position. */
static int session_headers_add_pad(nghttp2_session *session,
nghttp2_frame *frame) {
- int rv;
ssize_t padded_payloadlen;
nghttp2_active_outbound_item *aob;
nghttp2_bufs *framebufs;
@@ -2246,11 +2229,7 @@ static int session_headers_add_pad(nghttp2_session *session,
DEBUGF("send: padding selected: payloadlen=%zd, padlen=%zu\n",
padded_payloadlen, padlen);
- rv = nghttp2_frame_add_pad(framebufs, &frame->hd, padlen, 0);
-
- if (rv != 0) {
- return rv;
- }
+ nghttp2_frame_add_pad(framebufs, &frame->hd, padlen, 0);
frame->headers.padlen = padlen;
@@ -2333,13 +2312,7 @@ static int session_prep_frame(nghttp2_session *session,
// Search stream including closed again.
stream = nghttp2_session_get_stream_raw(session, frame->hd.stream_id);
if (stream) {
- int rv2;
-
- rv2 = session_detach_stream_item(session, stream);
-
- if (nghttp2_is_fatal(rv2)) {
- return rv2;
- }
+ session_detach_stream_item(session, stream);
}
return rv;
@@ -2354,12 +2327,8 @@ static int session_prep_frame(nghttp2_session *session,
queue when session->remote_window_size > 0 */
assert(session->remote_window_size > 0);
- rv = session_defer_stream_item(session, stream,
- NGHTTP2_STREAM_FLAG_DEFERRED_FLOW_CONTROL);
-
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
+ session_defer_stream_item(session, stream,
+ NGHTTP2_STREAM_FLAG_DEFERRED_FLOW_CONTROL);
session->aob.item = NULL;
active_outbound_item_reset(&session->aob, mem);
@@ -2373,23 +2342,15 @@ static int session_prep_frame(nghttp2_session *session,
return rv;
}
if (rv == NGHTTP2_ERR_DEFERRED) {
- rv = session_defer_stream_item(session, stream,
- NGHTTP2_STREAM_FLAG_DEFERRED_USER);
-
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
+ session_defer_stream_item(session, stream,
+ NGHTTP2_STREAM_FLAG_DEFERRED_USER);
session->aob.item = NULL;
active_outbound_item_reset(&session->aob, mem);
return NGHTTP2_ERR_DEFERRED;
}
if (rv == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
- rv = session_detach_stream_item(session, stream);
-
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
+ session_detach_stream_item(session, stream);
rv = nghttp2_session_add_rst_stream(session, frame->hd.stream_id,
NGHTTP2_INTERNAL_ERROR);
@@ -2399,13 +2360,7 @@ static int session_prep_frame(nghttp2_session *session,
return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE;
}
if (rv != 0) {
- int rv2;
-
- rv2 = session_detach_stream_item(session, stream);
-
- if (nghttp2_is_fatal(rv2)) {
- return rv2;
- }
+ session_detach_stream_item(session, stream);
return rv;
}
@@ -2907,10 +2862,7 @@ static int session_after_frame_sent1(nghttp2_session *session) {
}
if (stream && aux_data->eof) {
- rv = session_detach_stream_item(session, stream);
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
+ session_detach_stream_item(session, stream);
/* Call on_frame_send_callback after
nghttp2_stream_detach_item(), so that application can issue
@@ -3143,17 +3095,8 @@ static int session_after_frame_sent1(nghttp2_session *session) {
/*
* Called after a frame is sent and session_after_frame_sent1. This
* function is responsible to reset session->aob.
- *
- * This function returns 0 if it succeeds, or one of the following
- * negative error codes:
- *
- * NGHTTP2_ERR_NOMEM
- * Out of memory.
- * NGHTTP2_ERR_CALLBACK_FAILURE
- * The callback function failed.
*/
-static int session_after_frame_sent2(nghttp2_session *session) {
- int rv;
+static void session_after_frame_sent2(nghttp2_session *session) {
nghttp2_active_outbound_item *aob = &session->aob;
nghttp2_outbound_item *item = aob->item;
nghttp2_bufs *framebufs = &aob->framebufs;
@@ -3176,13 +3119,13 @@ static int session_after_frame_sent2(nghttp2_session *session) {
DEBUGF("send: next CONTINUATION frame, %zu bytes\n",
nghttp2_buf_len(&framebufs->cur->buf));
- return 0;
+ return;
}
}
active_outbound_item_reset(&session->aob, mem);
- return 0;
+ return;
}
/* DATA frame */
@@ -3196,7 +3139,7 @@ static int session_after_frame_sent2(nghttp2_session *session) {
if (aux_data->eof) {
active_outbound_item_reset(aob, mem);
- return 0;
+ return;
}
/* Reset no_copy here because next write may not use this. */
@@ -3208,22 +3151,18 @@ static int session_after_frame_sent2(nghttp2_session *session) {
further data. */
if (nghttp2_session_predicate_data_send(session, stream) != 0) {
if (stream) {
- rv = session_detach_stream_item(session, stream);
-
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
+ session_detach_stream_item(session, stream);
}
active_outbound_item_reset(aob, mem);
- return 0;
+ return;
}
aob->item = NULL;
active_outbound_item_reset(&session->aob, mem);
- return 0;
+ return;
}
static int session_call_send_data(nghttp2_session *session,
@@ -3454,7 +3393,7 @@ static ssize_t nghttp2_session_mem_send_internal(nghttp2_session *session,
/* Frame has completely sent */
if (fast_cb) {
- rv = session_after_frame_sent2(session);
+ session_after_frame_sent2(session);
} else {
rv = session_after_frame_sent1(session);
if (rv < 0) {
@@ -3462,12 +3401,7 @@ static ssize_t nghttp2_session_mem_send_internal(nghttp2_session *session,
assert(nghttp2_is_fatal(rv));
return rv;
}
- rv = session_after_frame_sent2(session);
- }
- if (rv < 0) {
- /* FATAL */
- assert(nghttp2_is_fatal(rv));
- return rv;
+ session_after_frame_sent2(session);
}
/* We have already adjusted the next state */
break;
@@ -3506,11 +3440,7 @@ static ssize_t nghttp2_session_mem_send_internal(nghttp2_session *session,
}
if (rv == NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE) {
- rv = session_detach_stream_item(session, stream);
-
- if (nghttp2_is_fatal(rv)) {
- return rv;
- }
+ session_detach_stream_item(session, stream);
rv = nghttp2_session_add_rst_stream(session, frame->hd.stream_id,
NGHTTP2_INTERNAL_ERROR);
@@ -3534,11 +3464,7 @@ static ssize_t nghttp2_session_mem_send_internal(nghttp2_session *session,
assert(nghttp2_is_fatal(rv));
return rv;
}
- rv = session_after_frame_sent2(session);
- if (rv < 0) {
- assert(nghttp2_is_fatal(rv));
- return rv;
- }
+ session_after_frame_sent2(session);
/* We have already adjusted the next state */
@@ -4424,17 +4350,12 @@ int nghttp2_session_on_headers_received(nghttp2_session *session,
}
static int session_process_headers_frame(nghttp2_session *session) {
- int rv;
nghttp2_inbound_frame *iframe = &session->iframe;
nghttp2_frame *frame = &iframe->frame;
nghttp2_stream *stream;
- rv = nghttp2_frame_unpack_headers_payload(&frame->headers, iframe->sbuf.pos);
+ nghttp2_frame_unpack_headers_payload(&frame->headers, iframe->sbuf.pos);
- if (rv != 0) {
- return nghttp2_session_terminate_session_with_reason(
- session, NGHTTP2_PROTOCOL_ERROR, "HEADERS: could not unpack");
- }
stream = nghttp2_session_get_stream(session, frame->hd.stream_id);
if (!stream) {
frame->headers.cat = NGHTTP2_HCAT_REQUEST;
@@ -5097,17 +5018,11 @@ int nghttp2_session_on_push_promise_received(nghttp2_session *session,
}
static int session_process_push_promise_frame(nghttp2_session *session) {
- int rv;
nghttp2_inbound_frame *iframe = &session->iframe;
nghttp2_frame *frame = &iframe->frame;
- rv = nghttp2_frame_unpack_push_promise_payload(&frame->push_promise,
- iframe->sbuf.pos);
-
- if (rv != 0) {
- return nghttp2_session_terminate_session_with_reason(
- session, NGHTTP2_PROTOCOL_ERROR, "PUSH_PROMISE: could not unpack");
- }
+ nghttp2_frame_unpack_push_promise_payload(&frame->push_promise,
+ iframe->sbuf.pos);
return nghttp2_session_on_push_promise_received(session, frame);
}
@@ -7808,11 +7723,8 @@ int nghttp2_session_pack_data(nghttp2_session *session, nghttp2_bufs *bufs,
nghttp2_frame_pack_frame_hd(buf->pos, &frame->hd);
- rv = nghttp2_frame_add_pad(bufs, &frame->hd, frame->data.padlen,
- aux_data->no_copy);
- if (rv != 0) {
- return rv;
- }
+ nghttp2_frame_add_pad(bufs, &frame->hd, frame->data.padlen,
+ aux_data->no_copy);
session_reschedule_stream(session, stream);
diff --git a/deps/nghttp2/lib/nghttp2_stream.c b/deps/nghttp2/lib/nghttp2_stream.c
index b3614a0b02761e..f1951f879d7927 100644
--- a/deps/nghttp2/lib/nghttp2_stream.c
+++ b/deps/nghttp2/lib/nghttp2_stream.c
@@ -465,14 +465,12 @@ static int stream_update_dep_on_attach_item(nghttp2_stream *stream) {
return 0;
}
-static int stream_update_dep_on_detach_item(nghttp2_stream *stream) {
+static void stream_update_dep_on_detach_item(nghttp2_stream *stream) {
if (nghttp2_pq_empty(&stream->obq)) {
stream_obq_remove(stream);
}
validate_tree(stream);
-
- return 0;
}
int nghttp2_stream_attach_item(nghttp2_stream *stream,
@@ -503,20 +501,20 @@ int nghttp2_stream_attach_item(nghttp2_stream *stream,
return 0;
}
-int nghttp2_stream_detach_item(nghttp2_stream *stream) {
+void nghttp2_stream_detach_item(nghttp2_stream *stream) {
DEBUGF("stream: stream=%d detach item=%p\n", stream->stream_id, stream->item);
stream->item = NULL;
stream->flags = (uint8_t)(stream->flags & ~NGHTTP2_STREAM_FLAG_DEFERRED_ALL);
if (stream->flags & NGHTTP2_STREAM_FLAG_NO_RFC7540_PRIORITIES) {
- return 0;
+ return;
}
- return stream_update_dep_on_detach_item(stream);
+ stream_update_dep_on_detach_item(stream);
}
-int nghttp2_stream_defer_item(nghttp2_stream *stream, uint8_t flags) {
+void nghttp2_stream_defer_item(nghttp2_stream *stream, uint8_t flags) {
assert(stream->item);
DEBUGF("stream: stream=%d defer item=%p cause=%02x\n", stream->stream_id,
@@ -525,10 +523,10 @@ int nghttp2_stream_defer_item(nghttp2_stream *stream, uint8_t flags) {
stream->flags |= flags;
if (stream->flags & NGHTTP2_STREAM_FLAG_NO_RFC7540_PRIORITIES) {
- return 0;
+ return;
}
- return stream_update_dep_on_detach_item(stream);
+ stream_update_dep_on_detach_item(stream);
}
int nghttp2_stream_resume_deferred_item(nghttp2_stream *stream, uint8_t flags) {
diff --git a/deps/nghttp2/lib/nghttp2_stream.h b/deps/nghttp2/lib/nghttp2_stream.h
index 7a8e4c6c1ddb08..71b9fb1140c932 100644
--- a/deps/nghttp2/lib/nghttp2_stream.h
+++ b/deps/nghttp2/lib/nghttp2_stream.h
@@ -258,14 +258,8 @@ void nghttp2_stream_shutdown(nghttp2_stream *stream, nghttp2_shut_flag flag);
* more of NGHTTP2_STREAM_FLAG_DEFERRED_USER and
* NGHTTP2_STREAM_FLAG_DEFERRED_FLOW_CONTROL. The |flags| indicates
* the reason of this action.
- *
- * This function returns 0 if it succeeds, or one of the following
- * negative error codes:
- *
- * NGHTTP2_ERR_NOMEM
- * Out of memory
*/
-int nghttp2_stream_defer_item(nghttp2_stream *stream, uint8_t flags);
+void nghttp2_stream_defer_item(nghttp2_stream *stream, uint8_t flags);
/*
* Put back deferred data in this stream to active state. The |flags|
@@ -379,14 +373,8 @@ int nghttp2_stream_attach_item(nghttp2_stream *stream,
/*
* Detaches |stream->item|. This function does not free
* |stream->item|. The caller must free it.
- *
- * This function returns 0 if it succeeds, or one of the following
- * negative error codes:
- *
- * NGHTTP2_ERR_NOMEM
- * Out of memory
*/
-int nghttp2_stream_detach_item(nghttp2_stream *stream);
+void nghttp2_stream_detach_item(nghttp2_stream *stream);
/*
* Makes the |stream| depend on the |dep_stream|. This dependency is
diff --git a/deps/undici/src/README.md b/deps/undici/src/README.md
index 05a5d21ed1195c..3ba89890df6f69 100644
--- a/deps/undici/src/README.md
+++ b/deps/undici/src/README.md
@@ -18,30 +18,34 @@ npm i undici
## Benchmarks
The benchmark is a simple `hello world` [example](benchmarks/benchmark.js) using a
-number of unix sockets (connections) with a pipelining depth of 10 running on Node 16.
-The benchmarks below have the [simd](https://github.com/WebAssembly/simd) feature enabled.
+number of unix sockets (connections) with a pipelining depth of 10 running on Node 20.6.0.
### Connections 1
+
| Tests | Samples | Result | Tolerance | Difference with slowest |
|---------------------|---------|---------------|-----------|-------------------------|
-| http - no keepalive | 15 | 4.63 req/sec | ± 2.77 % | - |
-| http - keepalive | 10 | 4.81 req/sec | ± 2.16 % | + 3.94 % |
-| undici - stream | 25 | 62.22 req/sec | ± 2.67 % | + 1244.58 % |
-| undici - dispatch | 15 | 64.33 req/sec | ± 2.47 % | + 1290.24 % |
-| undici - request | 15 | 66.08 req/sec | ± 2.48 % | + 1327.88 % |
-| undici - pipeline | 10 | 66.13 req/sec | ± 1.39 % | + 1329.08 % |
+| http - no keepalive | 15 | 5.32 req/sec | ± 2.61 % | - |
+| http - keepalive | 10 | 5.35 req/sec | ± 2.47 % | + 0.44 % |
+| undici - fetch | 15 | 41.85 req/sec | ± 2.49 % | + 686.04 % |
+| undici - pipeline | 40 | 50.36 req/sec | ± 2.77 % | + 845.92 % |
+| undici - stream | 15 | 60.58 req/sec | ± 2.75 % | + 1037.72 % |
+| undici - request | 10 | 61.19 req/sec | ± 2.60 % | + 1049.24 % |
+| undici - dispatch | 20 | 64.84 req/sec | ± 2.81 % | + 1117.81 % |
+
### Connections 50
| Tests | Samples | Result | Tolerance | Difference with slowest |
|---------------------|---------|------------------|-----------|-------------------------|
-| http - no keepalive | 50 | 3546.49 req/sec | ± 2.90 % | - |
-| http - keepalive | 15 | 5692.67 req/sec | ± 2.48 % | + 60.52 % |
-| undici - pipeline | 25 | 8478.71 req/sec | ± 2.62 % | + 139.07 % |
-| undici - request | 20 | 9766.66 req/sec | ± 2.79 % | + 175.39 % |
-| undici - stream | 15 | 10109.74 req/sec | ± 2.94 % | + 185.06 % |
-| undici - dispatch | 25 | 10949.73 req/sec | ± 2.54 % | + 208.75 % |
+| undici - fetch | 30 | 2107.19 req/sec | ± 2.69 % | - |
+| http - no keepalive | 10 | 2698.90 req/sec | ± 2.68 % | + 28.08 % |
+| http - keepalive | 10 | 4639.49 req/sec | ± 2.55 % | + 120.17 % |
+| undici - pipeline | 40 | 6123.33 req/sec | ± 2.97 % | + 190.59 % |
+| undici - stream | 50 | 9426.51 req/sec | ± 2.92 % | + 347.35 % |
+| undici - request | 10 | 10162.88 req/sec | ± 2.13 % | + 382.29 % |
+| undici - dispatch | 50 | 11191.11 req/sec | ± 2.98 % | + 431.09 % |
+
## Quick Start
@@ -432,6 +436,7 @@ and `undici.Agent`) which will enable the family autoselection algorithm when es
* [__Ethan Arrowood__](https://github.com/ethan-arrowood),
* [__Matteo Collina__](https://github.com/mcollina),
* [__Robert Nagy__](https://github.com/ronag),
+* [__Matthew Aitken__](https://github.com/KhafraDev),
## License
diff --git a/deps/undici/src/docs/api/Client.md b/deps/undici/src/docs/api/Client.md
index fc7c5d26e8f799..c0987713a328c5 100644
--- a/deps/undici/src/docs/api/Client.md
+++ b/deps/undici/src/docs/api/Client.md
@@ -17,11 +17,13 @@ Returns: `Client`
### Parameter: `ClientOptions`
+> ⚠️ Warning: The `H2` support is experimental.
+
* **bodyTimeout** `number | null` (optional) - Default: `300e3` - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds.
-* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
-* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout` when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
-* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
-* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second.
+* **headersTimeout** `number | null` (optional) - Default: `300e3` - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
+* **keepAliveMaxTimeout** `number | null` (optional) - Default: `600e3` - The maximum allowed `keepAliveTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Defaults to 10 minutes.
+* **keepAliveTimeout** `number | null` (optional) - Default: `4e3` - The timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. See [MDN: HTTP - Headers - Keep-Alive directives](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Keep-Alive#directives) for more details. Defaults to 4 seconds.
+* **keepAliveTimeoutThreshold** `number | null` (optional) - Default: `1e3` - A number of milliseconds subtracted from server *keep-alive* hints when overriding `keepAliveTimeout` to account for timing inaccuracies caused by e.g. transport latency. Defaults to 1 second.
* **maxHeaderSize** `number | null` (optional) - Default: `16384` - The maximum length of request headers in bytes. Defaults to 16KiB.
* **maxResponseSize** `number | null` (optional) - Default: `-1` - The maximum length of response body in bytes. Set to `-1` to disable.
* **pipelining** `number | null` (optional) - Default: `1` - The amount of concurrent requests to be sent over the single TCP/TLS connection according to [RFC7230](https://tools.ietf.org/html/rfc7230#section-6.3.2). Carefully consider your workload and environment before enabling concurrent requests as pipelining may reduce performance if used incorrectly. Pipelining is sensitive to network stack settings as well as head of line blocking caused by e.g. long running requests. Set to `0` to disable keep-alive connections.
@@ -30,6 +32,8 @@ Returns: `Client`
* **interceptors** `{ Client: DispatchInterceptor[] }` - Default: `[RedirectInterceptor]` - A list of interceptors that are applied to the dispatch method. Additional logic can be applied (such as, but not limited to: 302 status code handling, authentication, cookies, compression and caching). Note that the behavior of interceptors is Experimental and might change at any given time.
* **autoSelectFamily**: `boolean` (optional) - Default: depends on local Node version, on Node 18.13.0 and above is `false`. Enables a family autodetection algorithm that loosely implements section 5 of [RFC 8305](https://tools.ietf.org/html/rfc8305#section-5). See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details. This option is ignored if not supported by the current Node version.
* **autoSelectFamilyAttemptTimeout**: `number` - Default: depends on local Node version, on Node 18.13.0 and above is `250`. The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. See [here](https://nodejs.org/api/net.html#socketconnectoptions-connectlistener) for more details.
+* **allowH2**: `boolean` - Default: `false`. Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
+* **maxConcurrentStreams**: `number` - Default: `100`. Dictates the maximum number of concurrent streams for a single H2 session. It can be overridden by a SETTINGS remote frame.
#### Parameter: `ConnectOptions`
@@ -38,7 +42,7 @@ Furthermore, the following options can be passed:
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100.
-* **timeout** `number | null` (optional) - Default `10e3`
+* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`.
* **servername** `string | null` (optional)
* **keepAlive** `boolean | null` (optional) - Default: `true` - TCP keep-alive enabled
* **keepAliveInitialDelay** `number | null` (optional) - Default: `60000` - TCP keep-alive interval for the socket in milliseconds
diff --git a/deps/undici/src/docs/api/Connector.md b/deps/undici/src/docs/api/Connector.md
index 7c966507e5fceb..56821bd6430279 100644
--- a/deps/undici/src/docs/api/Connector.md
+++ b/deps/undici/src/docs/api/Connector.md
@@ -13,8 +13,8 @@ Every Tls option, see [here](https://nodejs.org/api/tls.html#tls_tls_connect_opt
Furthermore, the following options can be passed:
* **socketPath** `string | null` (optional) - Default: `null` - An IPC endpoint, either Unix domain socket or Windows named pipe.
-* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: 100.
-* **timeout** `number | null` (optional) - Default `10e3`
+* **maxCachedSessions** `number | null` (optional) - Default: `100` - Maximum number of TLS cached sessions. Use 0 to disable TLS session caching. Default: `100`.
+* **timeout** `number | null` (optional) - In milliseconds. Default `10e3`.
* **servername** `string | null` (optional)
Once you call `buildConnector`, it will return a connector function, which takes the following parameters.
diff --git a/deps/undici/src/docs/api/Dispatcher.md b/deps/undici/src/docs/api/Dispatcher.md
index a50642948aaca1..fd463bfea16737 100644
--- a/deps/undici/src/docs/api/Dispatcher.md
+++ b/deps/undici/src/docs/api/Dispatcher.md
@@ -200,8 +200,9 @@ Returns: `Boolean` - `false` if dispatcher is busy and further dispatch calls wo
* **blocking** `boolean` (optional) - Default: `false` - Whether the response is expected to take a long time and would end up blocking the pipeline. When this is set to `true` further pipelining will be avoided on the same connection until headers have been received.
* **upgrade** `string | null` (optional) - Default: `null` - Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`.
* **bodyTimeout** `number | null` (optional) - The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use `0` to disable it entirely. Defaults to 300 seconds.
-* **headersTimeout** `number | null` (optional) - The amount of time the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
+* **headersTimeout** `number | null` (optional) - The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers while not sending the request. Defaults to 300 seconds.
* **throwOnError** `boolean` (optional) - Default: `false` - Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server.
+* **expectContinue** `boolean` (optional) - Default: `false` - For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server
#### Parameter: `DispatchHandler`
diff --git a/deps/undici/src/docs/api/MockPool.md b/deps/undici/src/docs/api/MockPool.md
index 923c157aa64657..de53914002eca3 100644
--- a/deps/undici/src/docs/api/MockPool.md
+++ b/deps/undici/src/docs/api/MockPool.md
@@ -35,7 +35,8 @@ const mockPool = mockAgent.get('http://localhost:3000')
### `MockPool.intercept(options)`
-This method defines the interception rules for matching against requests for a MockPool or MockPool. We can intercept multiple times on a single instance.
+This method defines the interception rules for matching against requests for a MockClient or MockPool. We can intercept multiple times on a single instance, but each intercept is only used once.
+For example if you expect to make 2 requests inside a test, you need to call `intercept()` twice. Assuming you use `disableNetConnect()` you will get `MockNotMatchedError` on the second request when you only call `intercept()` once.
When defining interception rules, all the rules must pass for a request to be intercepted. If a request is not intercepted, a real request will be attempted.
diff --git a/deps/undici/src/index-fetch.js b/deps/undici/src/index-fetch.js
index 0d59d254f7d548..23ac530600769e 100644
--- a/deps/undici/src/index-fetch.js
+++ b/deps/undici/src/index-fetch.js
@@ -2,9 +2,9 @@
const fetchImpl = require('./lib/fetch').fetch
-module.exports.fetch = async function fetch (resource) {
+module.exports.fetch = async function fetch (resource, init = undefined) {
try {
- return await fetchImpl(...arguments)
+ return await fetchImpl(resource, init)
} catch (err) {
Error.captureStackTrace(err, this)
throw err
@@ -14,3 +14,4 @@ module.exports.FormData = require('./lib/fetch/formdata').FormData
module.exports.Headers = require('./lib/fetch/headers').Headers
module.exports.Response = require('./lib/fetch/response').Response
module.exports.Request = require('./lib/fetch/request').Request
+module.exports.WebSocket = require('./lib/websocket/websocket').WebSocket
diff --git a/deps/undici/src/index.d.ts b/deps/undici/src/index.d.ts
index 0730677b29e419..83a786d6a03131 100644
--- a/deps/undici/src/index.d.ts
+++ b/deps/undici/src/index.d.ts
@@ -1,57 +1,3 @@
-import Dispatcher from'./types/dispatcher'
-import { setGlobalDispatcher, getGlobalDispatcher } from './types/global-dispatcher'
-import { setGlobalOrigin, getGlobalOrigin } from './types/global-origin'
-import Pool from'./types/pool'
-import { RedirectHandler, DecoratorHandler } from './types/handlers'
-
-import BalancedPool from './types/balanced-pool'
-import Client from'./types/client'
-import buildConnector from'./types/connector'
-import errors from'./types/errors'
-import Agent from'./types/agent'
-import MockClient from'./types/mock-client'
-import MockPool from'./types/mock-pool'
-import MockAgent from'./types/mock-agent'
-import mockErrors from'./types/mock-errors'
-import ProxyAgent from'./types/proxy-agent'
-import { request, pipeline, stream, connect, upgrade } from './types/api'
-
-export * from './types/cookies'
-export * from './types/fetch'
-export * from './types/file'
-export * from './types/filereader'
-export * from './types/formdata'
-export * from './types/diagnostics-channel'
-export * from './types/websocket'
-export * from './types/content-type'
-export * from './types/cache'
-export { Interceptable } from './types/mock-interceptor'
-
-export { Dispatcher, BalancedPool, Pool, Client, buildConnector, errors, Agent, request, stream, pipeline, connect, upgrade, setGlobalDispatcher, getGlobalDispatcher, setGlobalOrigin, getGlobalOrigin, MockClient, MockPool, MockAgent, mockErrors, ProxyAgent, RedirectHandler, DecoratorHandler }
+export * from './types/index'
+import Undici from './types/index'
export default Undici
-
-declare namespace Undici {
- var Dispatcher: typeof import('./types/dispatcher').default
- var Pool: typeof import('./types/pool').default;
- var RedirectHandler: typeof import ('./types/handlers').RedirectHandler
- var DecoratorHandler: typeof import ('./types/handlers').DecoratorHandler
- var createRedirectInterceptor: typeof import ('./types/interceptors').createRedirectInterceptor
- var BalancedPool: typeof import('./types/balanced-pool').default;
- var Client: typeof import('./types/client').default;
- var buildConnector: typeof import('./types/connector').default;
- var errors: typeof import('./types/errors').default;
- var Agent: typeof import('./types/agent').default;
- var setGlobalDispatcher: typeof import('./types/global-dispatcher').setGlobalDispatcher;
- var getGlobalDispatcher: typeof import('./types/global-dispatcher').getGlobalDispatcher;
- var request: typeof import('./types/api').request;
- var stream: typeof import('./types/api').stream;
- var pipeline: typeof import('./types/api').pipeline;
- var connect: typeof import('./types/api').connect;
- var upgrade: typeof import('./types/api').upgrade;
- var MockClient: typeof import('./types/mock-client').default;
- var MockPool: typeof import('./types/mock-pool').default;
- var MockAgent: typeof import('./types/mock-agent').default;
- var mockErrors: typeof import('./types/mock-errors').default;
- var fetch: typeof import('./types/fetch').fetch;
- var caches: typeof import('./types/cache').caches;
-}
diff --git a/deps/undici/src/index.js b/deps/undici/src/index.js
index 7e8831ceeea3ea..7c0c8adcd6c809 100644
--- a/deps/undici/src/index.js
+++ b/deps/undici/src/index.js
@@ -106,7 +106,10 @@ if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) {
try {
return await fetchImpl(...arguments)
} catch (err) {
- Error.captureStackTrace(err, this)
+ if (typeof err === 'object') {
+ Error.captureStackTrace(err, this)
+ }
+
throw err
}
}
diff --git a/deps/undici/src/lib/api/api-connect.js b/deps/undici/src/lib/api/api-connect.js
index 0503b1a2f0eb10..fd2b6ad97a52d5 100644
--- a/deps/undici/src/lib/api/api-connect.js
+++ b/deps/undici/src/lib/api/api-connect.js
@@ -1,7 +1,7 @@
'use strict'
-const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const { AsyncResource } = require('async_hooks')
+const { InvalidArgumentError, RequestAbortedError, SocketError } = require('../core/errors')
const util = require('../core/util')
const { addSignal, removeSignal } = require('./abort-signal')
@@ -50,7 +50,13 @@ class ConnectHandler extends AsyncResource {
removeSignal(this)
this.callback = null
- const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+
+ let headers = rawHeaders
+ // Indicates it is an HTTP2Session
+ if (headers != null) {
+ headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders)
+ }
+
this.runInAsyncScope(callback, null, null, {
statusCode,
headers,
diff --git a/deps/undici/src/lib/api/api-request.js b/deps/undici/src/lib/api/api-request.js
index 71d7e926b4c395..f130ecc9867a88 100644
--- a/deps/undici/src/lib/api/api-request.js
+++ b/deps/undici/src/lib/api/api-request.js
@@ -95,7 +95,6 @@ class RequestHandler extends AsyncResource {
this.callback = null
this.res = body
-
if (callback !== null) {
if (this.throwOnError && statusCode >= 400) {
this.runInAsyncScope(getResolveErrorBodyCallback, null,
diff --git a/deps/undici/src/lib/cache/cache.js b/deps/undici/src/lib/cache/cache.js
index 18f06a348a0a88..9b3110860cd6b8 100644
--- a/deps/undici/src/lib/cache/cache.js
+++ b/deps/undici/src/lib/cache/cache.js
@@ -379,11 +379,7 @@ class Cache {
const reader = stream.getReader()
// 11.3
- readAllBytes(
- reader,
- (bytes) => bodyReadPromise.resolve(bytes),
- (error) => bodyReadPromise.reject(error)
- )
+ readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject)
} else {
bodyReadPromise.resolve(undefined)
}
diff --git a/deps/undici/src/lib/client.js b/deps/undici/src/lib/client.js
index 7d9ec8d7c272b3..b5170d4f88da9b 100644
--- a/deps/undici/src/lib/client.js
+++ b/deps/undici/src/lib/client.js
@@ -6,6 +6,7 @@
const assert = require('assert')
const net = require('net')
+const { pipeline } = require('stream')
const util = require('./core/util')
const timers = require('./timers')
const Request = require('./core/request')
@@ -67,8 +68,40 @@ const {
kDispatch,
kInterceptors,
kLocalAddress,
- kMaxResponseSize
+ kMaxResponseSize,
+ kHTTPConnVersion,
+ // HTTP2
+ kHost,
+ kHTTP2Session,
+ kHTTP2SessionState,
+ kHTTP2BuildRequest,
+ kHTTP2CopyHeaders,
+ kHTTP1BuildRequest
} = require('./core/symbols')
+
+/** @type {import('http2')} */
+let http2
+try {
+ http2 = require('http2')
+} catch {
+ // @ts-ignore
+ http2 = { constants: {} }
+}
+
+const {
+ constants: {
+ HTTP2_HEADER_AUTHORITY,
+ HTTP2_HEADER_METHOD,
+ HTTP2_HEADER_PATH,
+ HTTP2_HEADER_CONTENT_LENGTH,
+ HTTP2_HEADER_EXPECT,
+ HTTP2_HEADER_STATUS
+ }
+} = http2
+
+// Experimental
+let h2ExperimentalWarned = false
+
const FastBuffer = Buffer[Symbol.species]
const kClosedResolve = Symbol('kClosedResolve')
@@ -122,7 +155,10 @@ class Client extends DispatcherBase {
localAddress,
maxResponseSize,
autoSelectFamily,
- autoSelectFamilyAttemptTimeout
+ autoSelectFamilyAttemptTimeout,
+ // h2
+ allowH2,
+ maxConcurrentStreams
} = {}) {
super()
@@ -205,10 +241,20 @@ class Client extends DispatcherBase {
throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number')
}
+ // h2
+ if (allowH2 != null && typeof allowH2 !== 'boolean') {
+ throw new InvalidArgumentError('allowH2 must be a valid boolean value')
+ }
+
+ if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) {
+ throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0')
+ }
+
if (typeof connect !== 'function') {
connect = buildConnector({
...tls,
maxCachedSessions,
+ allowH2,
socketPath,
timeout: connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
@@ -240,6 +286,18 @@ class Client extends DispatcherBase {
this[kMaxRequests] = maxRequestsPerClient
this[kClosedResolve] = null
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1
+ this[kHTTPConnVersion] = 'h1'
+
+ // HTTP/2
+ this[kHTTP2Session] = null
+ this[kHTTP2SessionState] = !allowH2
+ ? null
+ : {
+ // streams: null, // Fixed queue of streams - For future support of `push`
+ openStreams: 0, // Keep track of them to decide whether or not to unref the session
+ maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server
+ }
+ this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}`
// kQueue is built up of 3 sections separated by
// the kRunningIdx and kPendingIdx indices.
@@ -298,7 +356,9 @@ class Client extends DispatcherBase {
[kDispatch] (opts, handler) {
const origin = opts.origin || this[kUrl].origin
- const request = new Request(origin, opts, handler)
+ const request = this[kHTTPConnVersion] === 'h2'
+ ? Request[kHTTP2BuildRequest](origin, opts, handler)
+ : Request[kHTTP1BuildRequest](origin, opts, handler)
this[kQueue].push(request)
if (this[kResuming]) {
@@ -319,6 +379,8 @@ class Client extends DispatcherBase {
}
async [kClose] () {
+ // TODO: for H2 we need to gracefully flush the remaining enqueued
+ // request and close each stream.
return new Promise((resolve) => {
if (!this[kSize]) {
resolve(null)
@@ -345,6 +407,12 @@ class Client extends DispatcherBase {
resolve()
}
+ if (this[kHTTP2Session] != null) {
+ util.destroy(this[kHTTP2Session], err)
+ this[kHTTP2Session] = null
+ this[kHTTP2SessionState] = null
+ }
+
if (!this[kSocket]) {
queueMicrotask(callback)
} else {
@@ -356,6 +424,64 @@ class Client extends DispatcherBase {
}
}
+function onHttp2SessionError (err) {
+ assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
+
+ this[kSocket][kError] = err
+
+ onError(this[kClient], err)
+}
+
+function onHttp2FrameError (type, code, id) {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+
+ if (id === 0) {
+ this[kSocket][kError] = err
+ onError(this[kClient], err)
+ }
+}
+
+function onHttp2SessionEnd () {
+ util.destroy(this, new SocketError('other side closed'))
+ util.destroy(this[kSocket], new SocketError('other side closed'))
+}
+
+function onHTTP2GoAway (code) {
+ const client = this[kClient]
+ const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`)
+ client[kSocket] = null
+ client[kHTTP2Session] = null
+
+ if (client.destroyed) {
+ assert(this[kPending] === 0)
+
+ // Fail entire queue.
+ const requests = client[kQueue].splice(client[kRunningIdx])
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i]
+ errorRequest(this, request, err)
+ }
+ } else if (client[kRunning] > 0) {
+ // Fail head of pipeline.
+ const request = client[kQueue][client[kRunningIdx]]
+ client[kQueue][client[kRunningIdx]++] = null
+
+ errorRequest(client, request, err)
+ }
+
+ client[kPendingIdx] = client[kRunningIdx]
+
+ assert(client[kRunning] === 0)
+
+ client.emit('disconnect',
+ client[kUrl],
+ [client],
+ err
+ )
+
+ resume(client)
+}
+
const constants = require('./llhttp/constants')
const createRedirectInterceptor = require('./interceptor/redirectInterceptor')
const EMPTY_BUF = Buffer.alloc(0)
@@ -946,16 +1072,18 @@ function onSocketReadable () {
}
function onSocketError (err) {
- const { [kParser]: parser } = this
+ const { [kClient]: client, [kParser]: parser } = this
assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID')
- // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
- // to the user.
- if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
- // We treat all incoming data so for as a valid response.
- parser.onMessageComplete()
- return
+ if (client[kHTTPConnVersion] !== 'h2') {
+ // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded
+ // to the user.
+ if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
+      // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ return
+ }
}
this[kError] = err
@@ -984,27 +1112,31 @@ function onError (client, err) {
}
function onSocketEnd () {
- const { [kParser]: parser } = this
+ const { [kParser]: parser, [kClient]: client } = this
- if (parser.statusCode && !parser.shouldKeepAlive) {
- // We treat all incoming data so far as a valid response.
- parser.onMessageComplete()
- return
+ if (client[kHTTPConnVersion] !== 'h2') {
+ if (parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ return
+ }
}
util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}
function onSocketClose () {
- const { [kClient]: client } = this
+ const { [kClient]: client, [kParser]: parser } = this
- if (!this[kError] && this[kParser].statusCode && !this[kParser].shouldKeepAlive) {
- // We treat all incoming data so far as a valid response.
- this[kParser].onMessageComplete()
- }
+ if (client[kHTTPConnVersion] === 'h1' && parser) {
+ if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {
+ // We treat all incoming data so far as a valid response.
+ parser.onMessageComplete()
+ }
- this[kParser].destroy()
- this[kParser] = null
+ this[kParser].destroy()
+ this[kParser] = null
+ }
const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))
@@ -1092,24 +1224,54 @@ async function connect (client) {
return
}
- if (!llhttpInstance) {
- llhttpInstance = await llhttpPromise
- llhttpPromise = null
- }
-
client[kConnecting] = false
assert(socket)
- socket[kNoRef] = false
- socket[kWriting] = false
- socket[kReset] = false
- socket[kBlocking] = false
- socket[kError] = null
- socket[kParser] = new Parser(client, socket, llhttpInstance)
- socket[kClient] = client
+ const isH2 = socket.alpnProtocol === 'h2'
+ if (isH2) {
+ if (!h2ExperimentalWarned) {
+ h2ExperimentalWarned = true
+ process.emitWarning('H2 support is experimental, expect them to change at any time.', {
+ code: 'UNDICI-H2'
+ })
+ }
+
+ const session = http2.connect(client[kUrl], {
+ createConnection: () => socket,
+ peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams
+ })
+
+ client[kHTTPConnVersion] = 'h2'
+ session[kClient] = client
+ session[kSocket] = socket
+ session.on('error', onHttp2SessionError)
+ session.on('frameError', onHttp2FrameError)
+ session.on('end', onHttp2SessionEnd)
+ session.on('goaway', onHTTP2GoAway)
+ session.on('close', onSocketClose)
+ session.unref()
+
+ client[kHTTP2Session] = session
+ socket[kHTTP2Session] = session
+ } else {
+ if (!llhttpInstance) {
+ llhttpInstance = await llhttpPromise
+ llhttpPromise = null
+ }
+
+ socket[kNoRef] = false
+ socket[kWriting] = false
+ socket[kReset] = false
+ socket[kBlocking] = false
+ socket[kParser] = new Parser(client, socket, llhttpInstance)
+ }
+
socket[kCounter] = 0
socket[kMaxRequests] = client[kMaxRequests]
+ socket[kClient] = client
+ socket[kError] = null
+
socket
.on('error', onSocketError)
.on('readable', onSocketReadable)
@@ -1208,7 +1370,7 @@ function _resume (client, sync) {
const socket = client[kSocket]
- if (socket && !socket.destroyed) {
+ if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') {
if (client[kSize] === 0) {
if (!socket[kNoRef] && socket.unref) {
socket.unref()
@@ -1273,7 +1435,7 @@ function _resume (client, sync) {
return
}
- if (!socket) {
+ if (!socket && !client[kHTTP2Session]) {
connect(client)
return
}
@@ -1334,6 +1496,11 @@ function _resume (client, sync) {
}
function write (client, request) {
+ if (client[kHTTPConnVersion] === 'h2') {
+ writeH2(client, client[kHTTP2Session], request)
+ return
+ }
+
const { body, method, path, host, upgrade, headers, blocking, reset } = request
// https://tools.ietf.org/html/rfc7231#section-4.3.1
@@ -1489,9 +1656,291 @@ function write (client, request) {
return true
}
-function writeStream ({ body, client, request, socket, contentLength, header, expectsPayload }) {
+function writeH2 (client, session, request) {
+ const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request
+
+ let headers
+ if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim())
+ else headers = reqHeaders
+
+ if (upgrade) {
+ errorRequest(client, request, new Error('Upgrade not supported for H2'))
+ return false
+ }
+
+ try {
+ // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event?
+ request.onConnect((err) => {
+ if (request.aborted || request.completed) {
+ return
+ }
+
+ errorRequest(client, request, err || new RequestAbortedError())
+ })
+ } catch (err) {
+ errorRequest(client, request, err)
+ }
+
+ if (request.aborted) {
+ return false
+ }
+
+ let stream
+ const h2State = client[kHTTP2SessionState]
+
+ headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost]
+ headers[HTTP2_HEADER_PATH] = path
+
+ if (method === 'CONNECT') {
+ session.ref()
+ // we are already connected, streams are pending, first request
+ // will create a new stream. We trigger a request to create the stream and wait until
+ // `ready` event is triggered
+ // We disabled endStream to allow the user to write to the stream
+ stream = session.request(headers, { endStream: false, signal })
+
+ if (stream.id && !stream.pending) {
+ request.onUpgrade(null, null, stream)
+ ++h2State.openStreams
+ } else {
+ stream.once('ready', () => {
+ request.onUpgrade(null, null, stream)
+ ++h2State.openStreams
+ })
+ }
+
+ stream.once('close', () => {
+ h2State.openStreams -= 1
+ // TODO(HTTP/2): unref only if current streams count is 0
+ if (h2State.openStreams === 0) session.unref()
+ })
+
+ return true
+ } else {
+ headers[HTTP2_HEADER_METHOD] = method
+ }
+
+ // https://tools.ietf.org/html/rfc7231#section-4.3.1
+ // https://tools.ietf.org/html/rfc7231#section-4.3.2
+ // https://tools.ietf.org/html/rfc7231#section-4.3.5
+
+ // Sending a payload body on a request that does not
+ // expect it can cause undefined behavior on some
+ // servers and corrupt connection state. Do not
+ // re-use the connection for further requests.
+
+ const expectsPayload = (
+ method === 'PUT' ||
+ method === 'POST' ||
+ method === 'PATCH'
+ )
+
+ if (body && typeof body.read === 'function') {
+ // Try to read EOF in order to get length.
+ body.read(0)
+ }
+
+ let contentLength = util.bodyLength(body)
+
+ if (contentLength == null) {
+ contentLength = request.contentLength
+ }
+
+ if (contentLength === 0 || !expectsPayload) {
+ // https://tools.ietf.org/html/rfc7230#section-3.3.2
+ // A user agent SHOULD NOT send a Content-Length header field when
+ // the request message does not contain a payload body and the method
+ // semantics do not anticipate such a body.
+
+ contentLength = null
+ }
+
+ if (request.contentLength != null && request.contentLength !== contentLength) {
+ if (client[kStrictContentLength]) {
+ errorRequest(client, request, new RequestContentLengthMismatchError())
+ return false
+ }
+
+ process.emitWarning(new RequestContentLengthMismatchError())
+ }
+
+ if (contentLength != null) {
+ assert(body, 'no body must not have content length')
+ headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`
+ }
+
+ session.ref()
+
+ const shouldEndStream = method === 'GET' || method === 'HEAD'
+ if (expectContinue) {
+ headers[HTTP2_HEADER_EXPECT] = '100-continue'
+ /**
+ * @type {import('node:http2').ClientHttp2Stream}
+ */
+ stream = session.request(headers, { endStream: shouldEndStream, signal })
+
+ stream.once('continue', writeBodyH2)
+ } else {
+ /** @type {import('node:http2').ClientHttp2Stream} */
+ stream = session.request(headers, {
+ endStream: shouldEndStream,
+ signal
+ })
+ writeBodyH2()
+ }
+
+  // Increment counter as we have several new streams open
+ ++h2State.openStreams
+
+ stream.once('response', headers => {
+ if (request.onHeaders(Number(headers[HTTP2_HEADER_STATUS]), headers, stream.resume.bind(stream), '') === false) {
+ stream.pause()
+ }
+ })
+
+ stream.once('end', () => {
+ request.onComplete([])
+ })
+
+ stream.on('data', (chunk) => {
+ if (request.onData(chunk) === false) stream.pause()
+ })
+
+ stream.once('close', () => {
+ h2State.openStreams -= 1
+ // TODO(HTTP/2): unref only if current streams count is 0
+ if (h2State.openStreams === 0) session.unref()
+ })
+
+ stream.once('error', function (err) {
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1
+ util.destroy(stream, err)
+ }
+ })
+
+ stream.once('frameError', (type, code) => {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`)
+ errorRequest(client, request, err)
+
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1
+ util.destroy(stream, err)
+ }
+ })
+
+ // stream.on('aborted', () => {
+ // // TODO(HTTP/2): Support aborted
+ // })
+
+ // stream.on('timeout', () => {
+ // // TODO(HTTP/2): Support timeout
+ // })
+
+ // stream.on('push', headers => {
+  // // TODO(HTTP/2): Support push
+ // })
+
+ // stream.on('trailers', headers => {
+ // // TODO(HTTP/2): Support trailers
+ // })
+
+ return true
+
+ function writeBodyH2 () {
+ /* istanbul ignore else: assertion */
+ if (!body) {
+ request.onRequestSent()
+ } else if (util.isBuffer(body)) {
+ assert(contentLength === body.byteLength, 'buffer body must have content length')
+ stream.cork()
+ stream.write(body)
+ stream.uncork()
+ request.onBodySent(body)
+ request.onRequestSent()
+ } else if (util.isBlobLike(body)) {
+ if (typeof body.stream === 'function') {
+ writeIterable({
+ client,
+ request,
+ contentLength,
+ h2stream: stream,
+ expectsPayload,
+ body: body.stream(),
+ socket: client[kSocket],
+ header: ''
+ })
+ } else {
+ writeBlob({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ h2stream: stream,
+ header: '',
+ socket: client[kSocket]
+ })
+ }
+ } else if (util.isStream(body)) {
+ writeStream({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ socket: client[kSocket],
+ h2stream: stream,
+ header: ''
+ })
+ } else if (util.isIterable(body)) {
+ writeIterable({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ header: '',
+ h2stream: stream,
+ socket: client[kSocket]
+ })
+ } else {
+ assert(false)
+ }
+ }
+}
+
+function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined')
+ if (client[kHTTPConnVersion] === 'h2') {
+    // For HTTP/2, it is enough to pipe the stream
+ const pipe = pipeline(
+ body,
+ h2stream,
+ (err) => {
+ if (err) {
+ util.destroy(body, err)
+ util.destroy(h2stream, err)
+ } else {
+ request.onRequestSent()
+ }
+ }
+ )
+
+ pipe.on('data', onPipeData)
+ pipe.once('end', () => {
+ pipe.removeListener('data', onPipeData)
+ util.destroy(pipe)
+ })
+
+ function onPipeData (chunk) {
+ request.onBodySent(chunk)
+ }
+
+ return
+ }
+
let finished = false
const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header })
@@ -1572,9 +2021,10 @@ function writeStream ({ body, client, request, socket, contentLength, header, ex
.on('error', onFinished)
}
-async function writeBlob ({ body, client, request, socket, contentLength, header, expectsPayload }) {
+async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
assert(contentLength === body.size, 'blob body must have content length')
+ const isH2 = client[kHTTPConnVersion] === 'h2'
try {
if (contentLength != null && contentLength !== body.size) {
throw new RequestContentLengthMismatchError()
@@ -1582,10 +2032,16 @@ async function writeBlob ({ body, client, request, socket, contentLength, header
const buffer = Buffer.from(await body.arrayBuffer())
- socket.cork()
- socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
- socket.write(buffer)
- socket.uncork()
+ if (isH2) {
+ h2stream.cork()
+ h2stream.write(buffer)
+ h2stream.uncork()
+ } else {
+ socket.cork()
+ socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1')
+ socket.write(buffer)
+ socket.uncork()
+ }
request.onBodySent(buffer)
request.onRequestSent()
@@ -1596,11 +2052,11 @@ async function writeBlob ({ body, client, request, socket, contentLength, header
resume(client)
} catch (err) {
- util.destroy(socket, err)
+ util.destroy(isH2 ? h2stream : socket, err)
}
}
-async function writeIterable ({ body, client, request, socket, contentLength, header, expectsPayload }) {
+async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined')
let callback = null
@@ -1622,6 +2078,33 @@ async function writeIterable ({ body, client, request, socket, contentLength, he
}
})
+ if (client[kHTTPConnVersion] === 'h2') {
+ h2stream
+ .on('close', onDrain)
+ .on('drain', onDrain)
+
+ try {
+ // It's up to the user to somehow abort the async iterable.
+ for await (const chunk of body) {
+ if (socket[kError]) {
+ throw socket[kError]
+ }
+
+ if (!h2stream.write(chunk)) {
+ await waitForDrain()
+ }
+ }
+ } catch (err) {
+ h2stream.destroy(err)
+ } finally {
+ h2stream
+ .off('close', onDrain)
+ .off('drain', onDrain)
+ }
+
+ return
+ }
+
socket
.on('close', onDrain)
.on('drain', onDrain)
diff --git a/deps/undici/src/lib/core/connect.js b/deps/undici/src/lib/core/connect.js
index f3b5cc33edd6cf..bb71085a1565fc 100644
--- a/deps/undici/src/lib/core/connect.js
+++ b/deps/undici/src/lib/core/connect.js
@@ -71,7 +71,7 @@ if (global.FinalizationRegistry) {
}
}
-function buildConnector ({ maxCachedSessions, socketPath, timeout, ...opts }) {
+function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero')
}
@@ -79,7 +79,7 @@ function buildConnector ({ maxCachedSessions, socketPath, timeout, ...opts }) {
const options = { path: socketPath, ...opts }
const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions)
timeout = timeout == null ? 10e3 : timeout
-
+ allowH2 = allowH2 != null ? allowH2 : false
return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
let socket
if (protocol === 'https:') {
@@ -99,6 +99,8 @@ function buildConnector ({ maxCachedSessions, socketPath, timeout, ...opts }) {
servername,
session,
localAddress,
+ // TODO(HTTP/2): Add support for h2c
+ ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'],
socket: httpSocket, // upgrade socket connection
port: port || 443,
host: hostname
diff --git a/deps/undici/src/lib/core/request.js b/deps/undici/src/lib/core/request.js
index 6c9a24d5d590d7..e3b0c7b9dbf06c 100644
--- a/deps/undici/src/lib/core/request.js
+++ b/deps/undici/src/lib/core/request.js
@@ -5,6 +5,7 @@ const {
NotSupportedError
} = require('./errors')
const assert = require('assert')
+const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require('./symbols')
const util = require('./util')
// tokenRegExp and headerCharRegex have been lifted from
@@ -62,7 +63,8 @@ class Request {
headersTimeout,
bodyTimeout,
reset,
- throwOnError
+ throwOnError,
+ expectContinue
}, handler) {
if (typeof path !== 'string') {
throw new InvalidArgumentError('path must be a string')
@@ -98,6 +100,10 @@ class Request {
throw new InvalidArgumentError('invalid reset')
}
+ if (expectContinue != null && typeof expectContinue !== 'boolean') {
+ throw new InvalidArgumentError('invalid expectContinue')
+ }
+
this.headersTimeout = headersTimeout
this.bodyTimeout = bodyTimeout
@@ -150,6 +156,9 @@ class Request {
this.headers = ''
+ // Only for H2
+ this.expectContinue = expectContinue != null ? expectContinue : false
+
if (Array.isArray(headers)) {
if (headers.length % 2 !== 0) {
throw new InvalidArgumentError('headers array must be even')
@@ -269,13 +278,64 @@ class Request {
return this[kHandler].onError(error)
}
+ // TODO: adjust to support H2
addHeader (key, value) {
processHeader(this, key, value)
return this
}
+
+ static [kHTTP1BuildRequest] (origin, opts, handler) {
+ // TODO: Migrate header parsing here, to make Requests
+ // HTTP agnostic
+ return new Request(origin, opts, handler)
+ }
+
+ static [kHTTP2BuildRequest] (origin, opts, handler) {
+ const headers = opts.headers
+ opts = { ...opts, headers: null }
+
+ const request = new Request(origin, opts, handler)
+
+ request.headers = {}
+
+ if (Array.isArray(headers)) {
+ if (headers.length % 2 !== 0) {
+ throw new InvalidArgumentError('headers array must be even')
+ }
+ for (let i = 0; i < headers.length; i += 2) {
+ processHeader(request, headers[i], headers[i + 1], true)
+ }
+ } else if (headers && typeof headers === 'object') {
+ const keys = Object.keys(headers)
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i]
+ processHeader(request, key, headers[key], true)
+ }
+ } else if (headers != null) {
+ throw new InvalidArgumentError('headers must be an object or an array')
+ }
+
+ return request
+ }
+
+ static [kHTTP2CopyHeaders] (raw) {
+ const rawHeaders = raw.split('\r\n')
+ const headers = {}
+
+ for (const header of rawHeaders) {
+ const [key, value] = header.split(': ')
+
+ if (value == null || value.length === 0) continue
+
+ if (headers[key]) headers[key] += `,${value}`
+ else headers[key] = value
+ }
+
+ return headers
+ }
}
-function processHeaderValue (key, val) {
+function processHeaderValue (key, val, skipAppend) {
if (val && typeof val === 'object') {
throw new InvalidArgumentError(`invalid ${key} header`)
}
@@ -286,10 +346,10 @@ function processHeaderValue (key, val) {
throw new InvalidArgumentError(`invalid ${key} header`)
}
- return `${key}: ${val}\r\n`
+ return skipAppend ? val : `${key}: ${val}\r\n`
}
-function processHeader (request, key, val) {
+function processHeader (request, key, val, skipAppend = false) {
if (val && (typeof val === 'object' && !Array.isArray(val))) {
throw new InvalidArgumentError(`invalid ${key} header`)
} else if (val === undefined) {
@@ -357,10 +417,16 @@ function processHeader (request, key, val) {
} else {
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
- request.headers += processHeaderValue(key, val[i])
+ if (skipAppend) {
+ if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`
+ else request.headers[key] = processHeaderValue(key, val[i], skipAppend)
+ } else {
+ request.headers += processHeaderValue(key, val[i])
+ }
}
} else {
- request.headers += processHeaderValue(key, val)
+ if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend)
+ else request.headers += processHeaderValue(key, val)
}
}
}
diff --git a/deps/undici/src/lib/core/symbols.js b/deps/undici/src/lib/core/symbols.js
index c852107a72af26..c2492f4355fd2a 100644
--- a/deps/undici/src/lib/core/symbols.js
+++ b/deps/undici/src/lib/core/symbols.js
@@ -51,5 +51,11 @@ module.exports = {
kProxy: Symbol('proxy agent options'),
kCounter: Symbol('socket request counter'),
kInterceptors: Symbol('dispatch interceptors'),
- kMaxResponseSize: Symbol('max response size')
+ kMaxResponseSize: Symbol('max response size'),
+ kHTTP2Session: Symbol('http2Session'),
+ kHTTP2SessionState: Symbol('http2Session state'),
+ kHTTP2BuildRequest: Symbol('http2 build request'),
+ kHTTP1BuildRequest: Symbol('http1 build request'),
+ kHTTP2CopyHeaders: Symbol('http2 copy headers'),
+ kHTTPConnVersion: Symbol('http connection version')
}
diff --git a/deps/undici/src/lib/core/util.js b/deps/undici/src/lib/core/util.js
index 4f8c1f8f1a1a4a..259ba7b38a64e9 100644
--- a/deps/undici/src/lib/core/util.js
+++ b/deps/undici/src/lib/core/util.js
@@ -168,7 +168,7 @@ function bodyLength (body) {
return 0
} else if (isStream(body)) {
const state = body._readableState
- return state && state.ended === true && Number.isFinite(state.length)
+ return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length)
? state.length
: null
} else if (isBlobLike(body)) {
@@ -199,6 +199,7 @@ function destroy (stream, err) {
// See: https://github.com/nodejs/node/pull/38505/files
stream.socket = null
}
+
stream.destroy(err)
} else if (err) {
process.nextTick((stream, err) => {
@@ -218,6 +219,9 @@ function parseKeepAliveTimeout (val) {
}
function parseHeaders (headers, obj = {}) {
+ // For H2 support
+ if (!Array.isArray(headers)) return headers
+
for (let i = 0; i < headers.length; i += 2) {
const key = headers[i].toString().toLowerCase()
let val = obj[key]
@@ -355,6 +359,12 @@ function getSocketInfo (socket) {
}
}
+async function * convertIterableToBuffer (iterable) {
+ for await (const chunk of iterable) {
+ yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk)
+ }
+}
+
let ReadableStream
function ReadableStreamFrom (iterable) {
if (!ReadableStream) {
@@ -362,8 +372,7 @@ function ReadableStreamFrom (iterable) {
}
if (ReadableStream.from) {
- // https://github.com/whatwg/streams/pull/1083
- return ReadableStream.from(iterable)
+ return ReadableStream.from(convertIterableToBuffer(iterable))
}
let iterator
diff --git a/deps/undici/src/lib/fetch/body.js b/deps/undici/src/lib/fetch/body.js
index 0c7b8b65363f43..105eb553157b06 100644
--- a/deps/undici/src/lib/fetch/body.js
+++ b/deps/undici/src/lib/fetch/body.js
@@ -387,6 +387,7 @@ function bodyMixinMethods (instance) {
try {
busboy = Busboy({
headers,
+ preservePath: true,
defParamCharset: 'utf8'
})
} catch (err) {
@@ -532,7 +533,7 @@ async function specConsumeBody (object, convertBytesToJSValue, instance) {
// 6. Otherwise, fully read object’s body given successSteps,
// errorSteps, and object’s relevant global object.
- fullyReadBody(object[kState].body, successSteps, errorSteps)
+ await fullyReadBody(object[kState].body, successSteps, errorSteps)
// 7. Return promise.
return promise.promise
diff --git a/deps/undici/src/lib/fetch/index.js b/deps/undici/src/lib/fetch/index.js
index d615f07ea272d1..50f1b9f3fcdcc1 100644
--- a/deps/undici/src/lib/fetch/index.js
+++ b/deps/undici/src/lib/fetch/index.js
@@ -1760,7 +1760,7 @@ async function httpNetworkFetch (
fetchParams.controller.connection.destroy()
// 2. Return the appropriate network error for fetchParams.
- return makeAppropriateNetworkError(fetchParams)
+ return makeAppropriateNetworkError(fetchParams, err)
}
return makeNetworkError(err)
@@ -1979,19 +1979,37 @@ async function httpNetworkFetch (
let location = ''
const headers = new Headers()
- for (let n = 0; n < headersList.length; n += 2) {
- const key = headersList[n + 0].toString('latin1')
- const val = headersList[n + 1].toString('latin1')
- if (key.toLowerCase() === 'content-encoding') {
- // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
- // "All content-coding values are case-insensitive..."
- codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
- } else if (key.toLowerCase() === 'location') {
- location = val
+ // For H2, the headers are a plain JS object
+ // We distinguish between them and iterate accordingly
+ if (Array.isArray(headersList)) {
+ for (let n = 0; n < headersList.length; n += 2) {
+ const key = headersList[n + 0].toString('latin1')
+ const val = headersList[n + 1].toString('latin1')
+ if (key.toLowerCase() === 'content-encoding') {
+ // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+ // "All content-coding values are case-insensitive..."
+ codings = val.toLowerCase().split(',').map((x) => x.trim())
+ } else if (key.toLowerCase() === 'location') {
+ location = val
+ }
+
+ headers.append(key, val)
}
+ } else {
+ const keys = Object.keys(headersList)
+ for (const key of keys) {
+ const val = headersList[key]
+ if (key.toLowerCase() === 'content-encoding') {
+ // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1
+ // "All content-coding values are case-insensitive..."
+ codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse()
+ } else if (key.toLowerCase() === 'location') {
+ location = val
+ }
- headers.append(key, val)
+ headers.append(key, val)
+ }
}
this.body = new Readable({ read: resume })
diff --git a/deps/undici/src/lib/fetch/response.js b/deps/undici/src/lib/fetch/response.js
index 1029dbef53371f..88deb71a06285e 100644
--- a/deps/undici/src/lib/fetch/response.js
+++ b/deps/undici/src/lib/fetch/response.js
@@ -49,7 +49,7 @@ class Response {
}
// https://fetch.spec.whatwg.org/#dom-response-json
- static json (data = undefined, init = {}) {
+ static json (data, init = {}) {
webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' })
if (init !== null) {
@@ -426,15 +426,15 @@ function filterResponse (response, type) {
}
// https://fetch.spec.whatwg.org/#appropriate-network-error
-function makeAppropriateNetworkError (fetchParams) {
+function makeAppropriateNetworkError (fetchParams, err = null) {
// 1. Assert: fetchParams is canceled.
assert(isCancelled(fetchParams))
// 2. Return an aborted network error if fetchParams is aborted;
// otherwise return a network error.
return isAborted(fetchParams)
- ? makeNetworkError(new DOMException('The operation was aborted.', 'AbortError'))
- : makeNetworkError('Request was cancelled.')
+ ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err }))
+ : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err }))
}
// https://whatpr.org/fetch/1392.html#initialize-a-response
diff --git a/deps/undici/src/lib/fetch/util.js b/deps/undici/src/lib/fetch/util.js
index 400687ba2e7d23..fcbba84bc9a8b0 100644
--- a/deps/undici/src/lib/fetch/util.js
+++ b/deps/undici/src/lib/fetch/util.js
@@ -556,16 +556,37 @@ function bytesMatch (bytes, metadataList) {
const algorithm = item.algo
// 2. Let expectedValue be the val component of item.
- const expectedValue = item.hash
+ let expectedValue = item.hash
+
+ // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e
+ // "be liberal with padding". This is annoying, and it's not even in the spec.
+
+ if (expectedValue.endsWith('==')) {
+ expectedValue = expectedValue.slice(0, -2)
+ }
// 3. Let actualValue be the result of applying algorithm to bytes.
- const actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+ let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64')
+
+ if (actualValue.endsWith('==')) {
+ actualValue = actualValue.slice(0, -2)
+ }
// 4. If actualValue is a case-sensitive match for expectedValue,
// return true.
if (actualValue === expectedValue) {
return true
}
+
+ let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest('base64url')
+
+ if (actualBase64URL.endsWith('==')) {
+ actualBase64URL = actualBase64URL.slice(0, -2)
+ }
+
+ if (actualBase64URL === expectedValue) {
+ return true
+ }
}
// 6. Return false.
@@ -812,17 +833,17 @@ function iteratorResult (pair, kind) {
/**
* @see https://fetch.spec.whatwg.org/#body-fully-read
*/
-function fullyReadBody (body, processBody, processBodyError) {
+async function fullyReadBody (body, processBody, processBodyError) {
// 1. If taskDestination is null, then set taskDestination to
// the result of starting a new parallel queue.
// 2. Let successSteps given a byte sequence bytes be to queue a
// fetch task to run processBody given bytes, with taskDestination.
- const successSteps = (bytes) => queueMicrotask(() => processBody(bytes))
+ const successSteps = processBody
// 3. Let errorSteps be to queue a fetch task to run processBodyError,
// with taskDestination.
- const errorSteps = (error) => queueMicrotask(() => processBodyError(error))
+ const errorSteps = processBodyError
// 4. Let reader be the result of getting a reader for body’s stream.
// If that threw an exception, then run errorSteps with that
@@ -837,7 +858,12 @@ function fullyReadBody (body, processBody, processBodyError) {
}
// 5. Read all bytes from reader, given successSteps and errorSteps.
- readAllBytes(reader, successSteps, errorSteps)
+ try {
+ const result = await readAllBytes(reader)
+ successSteps(result)
+ } catch (e) {
+ errorSteps(e)
+ }
}
/** @type {ReadableStream} */
@@ -906,36 +932,23 @@ function isomorphicEncode (input) {
* @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes
* @see https://streams.spec.whatwg.org/#read-loop
* @param {ReadableStreamDefaultReader} reader
- * @param {(bytes: Uint8Array) => void} successSteps
- * @param {(error: Error) => void} failureSteps
*/
-async function readAllBytes (reader, successSteps, failureSteps) {
+async function readAllBytes (reader) {
const bytes = []
let byteLength = 0
while (true) {
- let done
- let chunk
-
- try {
- ({ done, value: chunk } = await reader.read())
- } catch (e) {
- // 1. Call failureSteps with e.
- failureSteps(e)
- return
- }
+ const { done, value: chunk } = await reader.read()
if (done) {
// 1. Call successSteps with bytes.
- successSteps(Buffer.concat(bytes, byteLength))
- return
+ return Buffer.concat(bytes, byteLength)
}
// 1. If chunk is not a Uint8Array object, call failureSteps
// with a TypeError and abort these steps.
if (!isUint8Array(chunk)) {
- failureSteps(new TypeError('Received non-Uint8Array chunk'))
- return
+ throw new TypeError('Received non-Uint8Array chunk')
}
// 2. Append the bytes represented by chunk to bytes.
diff --git a/deps/undici/src/lib/pool.js b/deps/undici/src/lib/pool.js
index 93b3158f21a131..08509958069a4f 100644
--- a/deps/undici/src/lib/pool.js
+++ b/deps/undici/src/lib/pool.js
@@ -34,6 +34,7 @@ class Pool extends PoolBase {
socketPath,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
+ allowH2,
...options
} = {}) {
super()
@@ -54,6 +55,7 @@ class Pool extends PoolBase {
connect = buildConnector({
...tls,
maxCachedSessions,
+ allowH2,
socketPath,
timeout: connectTimeout == null ? 10e3 : connectTimeout,
...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined),
@@ -66,7 +68,7 @@ class Pool extends PoolBase {
: []
this[kConnections] = connections || null
this[kUrl] = util.parseOrigin(origin)
- this[kOptions] = { ...util.deepClone(options), connect }
+ this[kOptions] = { ...util.deepClone(options), connect, allowH2 }
this[kOptions].interceptors = options.interceptors
? { ...options.interceptors }
: undefined
diff --git a/deps/undici/src/lib/websocket/connection.js b/deps/undici/src/lib/websocket/connection.js
index 8c821899f6553e..e0fa69726b4054 100644
--- a/deps/undici/src/lib/websocket/connection.js
+++ b/deps/undici/src/lib/websocket/connection.js
@@ -1,6 +1,5 @@
'use strict'
-const { randomBytes, createHash } = require('crypto')
const diagnosticsChannel = require('diagnostics_channel')
const { uid, states } = require('./constants')
const {
@@ -22,6 +21,14 @@ channels.open = diagnosticsChannel.channel('undici:websocket:open')
channels.close = diagnosticsChannel.channel('undici:websocket:close')
channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error')
+/** @type {import('crypto')} */
+let crypto
+try {
+ crypto = require('crypto')
+} catch {
+
+}
+
/**
* @see https://websockets.spec.whatwg.org/#concept-websocket-establish
* @param {URL} url
@@ -66,7 +73,7 @@ function establishWebSocketConnection (url, protocols, ws, onEstablish, options)
// 5. Let keyValue be a nonce consisting of a randomly selected
// 16-byte value that has been forgiving-base64-encoded and
// isomorphic encoded.
- const keyValue = randomBytes(16).toString('base64')
+ const keyValue = crypto.randomBytes(16).toString('base64')
// 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s
// header list.
@@ -148,7 +155,7 @@ function establishWebSocketConnection (url, protocols, ws, onEstablish, options)
// trailing whitespace, the client MUST _Fail the WebSocket
// Connection_.
const secWSAccept = response.headersList.get('Sec-WebSocket-Accept')
- const digest = createHash('sha1').update(keyValue + uid).digest('base64')
+ const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64')
if (secWSAccept !== digest) {
failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.')
return
diff --git a/deps/undici/src/lib/websocket/frame.js b/deps/undici/src/lib/websocket/frame.js
index 61bfd3915cecc5..d867ad118b29b8 100644
--- a/deps/undici/src/lib/websocket/frame.js
+++ b/deps/undici/src/lib/websocket/frame.js
@@ -1,15 +1,22 @@
'use strict'
-const { randomBytes } = require('crypto')
const { maxUnsigned16Bit } = require('./constants')
+/** @type {import('crypto')} */
+let crypto
+try {
+ crypto = require('crypto')
+} catch {
+
+}
+
class WebsocketFrameSend {
/**
* @param {Buffer|undefined} data
*/
constructor (data) {
this.frameData = data
- this.maskKey = randomBytes(4)
+ this.maskKey = crypto.randomBytes(4)
}
createFrame (opcode) {
diff --git a/deps/undici/src/lib/websocket/websocket.js b/deps/undici/src/lib/websocket/websocket.js
index 22ad2fb11a1910..e4aa58f52fc589 100644
--- a/deps/undici/src/lib/websocket/websocket.js
+++ b/deps/undici/src/lib/websocket/websocket.js
@@ -3,6 +3,7 @@
const { webidl } = require('../fetch/webidl')
const { DOMException } = require('../fetch/constants')
const { URLSerializer } = require('../fetch/dataURL')
+const { getGlobalOrigin } = require('../fetch/global')
const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require('./constants')
const {
kWebSocketURL,
@@ -57,18 +58,28 @@ class WebSocket extends EventTarget {
url = webidl.converters.USVString(url)
protocols = options.protocols
- // 1. Let urlRecord be the result of applying the URL parser to url.
+ // 1. Let baseURL be this's relevant settings object's API base URL.
+ const baseURL = getGlobalOrigin()
+
+ // 1. Let urlRecord be the result of applying the URL parser to url with baseURL.
let urlRecord
try {
- urlRecord = new URL(url)
+ urlRecord = new URL(url, baseURL)
} catch (e) {
- // 2. If urlRecord is failure, then throw a "SyntaxError" DOMException.
+ // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException.
throw new DOMException(e, 'SyntaxError')
}
- // 3. If urlRecord’s scheme is not "ws" or "wss", then throw a
- // "SyntaxError" DOMException.
+ // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws".
+ if (urlRecord.protocol === 'http:') {
+ urlRecord.protocol = 'ws:'
+ } else if (urlRecord.protocol === 'https:') {
+ // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss".
+ urlRecord.protocol = 'wss:'
+ }
+
+ // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException.
if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') {
throw new DOMException(
`Expected a ws: or wss: protocol, got ${urlRecord.protocol}`,
@@ -76,19 +87,19 @@ class WebSocket extends EventTarget {
)
}
- // 4. If urlRecord’s fragment is non-null, then throw a "SyntaxError"
+ // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError"
// DOMException.
- if (urlRecord.hash) {
+ if (urlRecord.hash || urlRecord.href.endsWith('#')) {
throw new DOMException('Got fragment', 'SyntaxError')
}
- // 5. If protocols is a string, set protocols to a sequence consisting
+ // 8. If protocols is a string, set protocols to a sequence consisting
// of just that string.
if (typeof protocols === 'string') {
protocols = [protocols]
}
- // 6. If any of the values in protocols occur more than once or otherwise
+ // 9. If any of the values in protocols occur more than once or otherwise
// fail to match the requirements for elements that comprise the value
// of `Sec-WebSocket-Protocol` fields as defined by The WebSocket
// protocol, then throw a "SyntaxError" DOMException.
@@ -100,12 +111,12 @@ class WebSocket extends EventTarget {
throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError')
}
- // 7. Set this's url to urlRecord.
- this[kWebSocketURL] = urlRecord
+ // 10. Set this's url to urlRecord.
+ this[kWebSocketURL] = new URL(urlRecord.href)
- // 8. Let client be this's relevant settings object.
+ // 11. Let client be this's relevant settings object.
- // 9. Run this step in parallel:
+ // 12. Run this step in parallel:
// 1. Establish a WebSocket connection given urlRecord, protocols,
// and client.
diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json
index 598a78654a9845..3846b9dc3988c5 100644
--- a/deps/undici/src/package.json
+++ b/deps/undici/src/package.json
@@ -1,6 +1,6 @@
{
"name": "undici",
- "version": "5.23.0",
+ "version": "5.25.2",
"description": "An HTTP/1.1 client, written from scratch for Node.js",
"homepage": "https://undici.nodejs.org",
"bugs": {
@@ -11,12 +11,41 @@
"url": "git+https://github.com/nodejs/undici.git"
},
"license": "MIT",
- "author": "Matteo Collina ",
"contributors": [
+ {
+ "name": "Daniele Belardi",
+ "url": "https://github.com/dnlup",
+ "author": true
+ },
+ {
+ "name": "Ethan Arrowood",
+ "url": "https://github.com/ethan-arrowood",
+ "author": true
+ },
+ {
+ "name": "Matteo Collina",
+ "url": "https://github.com/mcollina",
+ "author": true
+ },
+ {
+ "name": "Matthew Aitken",
+ "url": "https://github.com/KhafraDev",
+ "author": true
+ },
{
"name": "Robert Nagy",
"url": "https://github.com/ronag",
"author": true
+ },
+ {
+ "name": "Szymon Marczak",
+ "url": "https://github.com/szmarczak",
+ "author": true
+ },
+ {
+ "name": "Tomas Della Vedova",
+ "url": "https://github.com/delvedor",
+ "author": true
}
],
"keywords": [
@@ -64,10 +93,11 @@
"bench:run": "CONNECTIONS=1 node benchmarks/benchmark.js; CONNECTIONS=50 node benchmarks/benchmark.js",
"serve:website": "docsify serve .",
"prepare": "husky install",
+ "postpublish": "node scripts/update-undici-types-version.js && cd types && npm publish",
"fuzz": "jsfuzz test/fuzzing/fuzz.js corpus"
},
"devDependencies": {
- "@sinonjs/fake-timers": "^10.0.2",
+ "@sinonjs/fake-timers": "^11.1.0",
"@types/node": "^18.0.3",
"abort-controller": "^3.0.0",
"atomic-sleep": "^1.0.0",
@@ -98,7 +128,7 @@
"standard": "^17.0.0",
"table": "^6.8.0",
"tap": "^16.1.0",
- "tsd": "^0.28.1",
+ "tsd": "^0.29.0",
"typescript": "^5.0.2",
"wait-on": "^7.0.1",
"ws": "^8.11.0"
diff --git a/deps/undici/src/types/README.md b/deps/undici/src/types/README.md
new file mode 100644
index 00000000000000..20a721c445a21b
--- /dev/null
+++ b/deps/undici/src/types/README.md
@@ -0,0 +1,6 @@
+# undici-types
+
+This package is a dual-publish of the [undici](https://www.npmjs.com/package/undici) library types. The `undici` package **still contains types**. This package is for users who _only_ need undici types (such as for `@types/node`). It is published alongside every release of `undici`, so you can always use the same version.
+
+- [GitHub nodejs/undici](https://github.com/nodejs/undici)
+- [Undici Documentation](https://undici.nodejs.org/#/)
diff --git a/deps/undici/src/types/client.d.ts b/deps/undici/src/types/client.d.ts
index 56074a15ae7a13..ac1779721f6a2c 100644
--- a/deps/undici/src/types/client.d.ts
+++ b/deps/undici/src/types/client.d.ts
@@ -1,7 +1,6 @@
import { URL } from 'url'
import { TlsOptions } from 'tls'
import Dispatcher from './dispatcher'
-import DispatchInterceptor from './dispatcher'
import buildConnector from "./connector";
/**
@@ -19,14 +18,14 @@ export class Client extends Dispatcher {
export declare namespace Client {
export interface OptionsInterceptors {
- Client: readonly DispatchInterceptor[];
+ Client: readonly Dispatcher.DispatchInterceptor[];
}
export interface Options {
/** TODO */
interceptors?: OptionsInterceptors;
/** The maximum length of request headers in bytes. Default: `16384` (16KiB). */
maxHeaderSize?: number;
- /** The amount of time the parser will wait to receive the complete HTTP headers (Node 14 and above only). Default: `300e3` milliseconds (300s). */
+ /** The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers (Node 14 and above only). Default: `300e3` milliseconds (300s). */
headersTimeout?: number;
/** @deprecated unsupported socketTimeout, use headersTimeout & bodyTimeout instead */
socketTimeout?: never;
@@ -40,13 +39,13 @@ export declare namespace Client {
idleTimeout?: never;
/** @deprecated unsupported keepAlive, use pipelining=0 instead */
keepAlive?: never;
- /** the timeout after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. Default: `4e3` milliseconds (4s). */
+ /** the timeout, in milliseconds, after which a socket without active requests will time out. Monitors time between activity on a connected socket. This value may be overridden by *keep-alive* hints from the server. Default: `4e3` milliseconds (4s). */
keepAliveTimeout?: number;
/** @deprecated unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead */
maxKeepAliveTimeout?: never;
- /** the maximum allowed `idleTimeout` when overridden by *keep-alive* hints from the server. Default: `600e3` milliseconds (10min). */
+ /** the maximum allowed `idleTimeout`, in milliseconds, when overridden by *keep-alive* hints from the server. Default: `600e3` milliseconds (10min). */
keepAliveMaxTimeout?: number;
- /** A number subtracted from server *keep-alive* hints when overriding `idleTimeout` to account for timing inaccuracies caused by e.g. transport latency. Default: `1e3` milliseconds (1s). */
+ /** A number of milliseconds subtracted from server *keep-alive* hints when overriding `idleTimeout` to account for timing inaccuracies caused by e.g. transport latency. Default: `1e3` milliseconds (1s). */
keepAliveTimeoutThreshold?: number;
/** TODO */
socketPath?: string;
@@ -71,7 +70,17 @@ export declare namespace Client {
/** Enables a family autodetection algorithm that loosely implements section 5 of RFC 8305. */
autoSelectFamily?: boolean;
/** The amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the `autoSelectFamily` option. */
- autoSelectFamilyAttemptTimeout?: number;
+ autoSelectFamilyAttemptTimeout?: number;
+ /**
+ * @description Enables support for H2 if the server has assigned bigger priority to it through ALPN negotiation.
+ * @default false
+ */
+ allowH2?: boolean;
+ /**
+ * @description Dictates the maximum number of concurrent streams for a single H2 session. It can be overriden by a SETTINGS remote frame.
+ * @default 100
+ */
+ maxConcurrentStreams?: number
}
export interface SocketInfo {
localAddress?: string
diff --git a/deps/undici/src/types/dispatcher.d.ts b/deps/undici/src/types/dispatcher.d.ts
index 7f621371f86ec1..816db19d20d878 100644
--- a/deps/undici/src/types/dispatcher.d.ts
+++ b/deps/undici/src/types/dispatcher.d.ts
@@ -109,7 +109,7 @@ declare namespace Dispatcher {
blocking?: boolean;
/** Upgrade the request. Should be used to specify the kind of upgrade i.e. `'Websocket'`. Default: `method === 'CONNECT' || null`. */
upgrade?: boolean | string | null;
- /** The amount of time the parser will wait to receive the complete HTTP headers. Defaults to 300 seconds. */
+ /** The amount of time, in milliseconds, the parser will wait to receive the complete HTTP headers. Defaults to 300 seconds. */
headersTimeout?: number | null;
/** The timeout after which a request will time out, in milliseconds. Monitors time between receiving body data. Use 0 to disable it entirely. Defaults to 300 seconds. */
bodyTimeout?: number | null;
@@ -117,6 +117,8 @@ declare namespace Dispatcher {
reset?: boolean;
/** Whether Undici should throw an error upon receiving a 4xx or 5xx response from the server. Defaults to false */
throwOnError?: boolean;
+ /** For H2, it appends the expect: 100-continue header, and halts the request body until a 100-continue is received from the remote server*/
+ expectContinue?: boolean;
}
export interface ConnectOptions {
path: string;
diff --git a/deps/undici/src/types/index.d.ts b/deps/undici/src/types/index.d.ts
new file mode 100644
index 00000000000000..c7532d69a073cc
--- /dev/null
+++ b/deps/undici/src/types/index.d.ts
@@ -0,0 +1,57 @@
+import Dispatcher from'./dispatcher'
+import { setGlobalDispatcher, getGlobalDispatcher } from './global-dispatcher'
+import { setGlobalOrigin, getGlobalOrigin } from './global-origin'
+import Pool from'./pool'
+import { RedirectHandler, DecoratorHandler } from './handlers'
+
+import BalancedPool from './balanced-pool'
+import Client from'./client'
+import buildConnector from'./connector'
+import errors from'./errors'
+import Agent from'./agent'
+import MockClient from'./mock-client'
+import MockPool from'./mock-pool'
+import MockAgent from'./mock-agent'
+import mockErrors from'./mock-errors'
+import ProxyAgent from'./proxy-agent'
+import { request, pipeline, stream, connect, upgrade } from './api'
+
+export * from './cookies'
+export * from './fetch'
+export * from './file'
+export * from './filereader'
+export * from './formdata'
+export * from './diagnostics-channel'
+export * from './websocket'
+export * from './content-type'
+export * from './cache'
+export { Interceptable } from './mock-interceptor'
+
+export { Dispatcher, BalancedPool, Pool, Client, buildConnector, errors, Agent, request, stream, pipeline, connect, upgrade, setGlobalDispatcher, getGlobalDispatcher, setGlobalOrigin, getGlobalOrigin, MockClient, MockPool, MockAgent, mockErrors, ProxyAgent, RedirectHandler, DecoratorHandler }
+export default Undici
+
+declare namespace Undici {
+ var Dispatcher: typeof import('./dispatcher').default
+ var Pool: typeof import('./pool').default;
+ var RedirectHandler: typeof import ('./handlers').RedirectHandler
+ var DecoratorHandler: typeof import ('./handlers').DecoratorHandler
+ var createRedirectInterceptor: typeof import ('./interceptors').createRedirectInterceptor
+ var BalancedPool: typeof import('./balanced-pool').default;
+ var Client: typeof import('./client').default;
+ var buildConnector: typeof import('./connector').default;
+ var errors: typeof import('./errors').default;
+ var Agent: typeof import('./agent').default;
+ var setGlobalDispatcher: typeof import('./global-dispatcher').setGlobalDispatcher;
+ var getGlobalDispatcher: typeof import('./global-dispatcher').getGlobalDispatcher;
+ var request: typeof import('./api').request;
+ var stream: typeof import('./api').stream;
+ var pipeline: typeof import('./api').pipeline;
+ var connect: typeof import('./api').connect;
+ var upgrade: typeof import('./api').upgrade;
+ var MockClient: typeof import('./mock-client').default;
+ var MockPool: typeof import('./mock-pool').default;
+ var MockAgent: typeof import('./mock-agent').default;
+ var mockErrors: typeof import('./mock-errors').default;
+ var fetch: typeof import('./fetch').fetch;
+ var caches: typeof import('./cache').caches;
+}
diff --git a/deps/undici/src/types/package.json b/deps/undici/src/types/package.json
new file mode 100644
index 00000000000000..16bf97c4ddf83c
--- /dev/null
+++ b/deps/undici/src/types/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "undici-types",
+ "version": "5.25.1",
+ "description": "A stand-alone types package for Undici",
+ "homepage": "https://undici.nodejs.org",
+ "bugs": {
+ "url": "https://github.com/nodejs/undici/issues"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/nodejs/undici.git"
+ },
+ "license": "MIT",
+ "types": "index.d.ts",
+ "files": [
+ "*.d.ts"
+ ],
+ "contributors": [
+ {
+ "name": "Daniele Belardi",
+ "url": "https://github.com/dnlup",
+ "author": true
+ },
+ {
+ "name": "Ethan Arrowood",
+ "url": "https://github.com/ethan-arrowood",
+ "author": true
+ },
+ {
+ "name": "Matteo Collina",
+ "url": "https://github.com/mcollina",
+ "author": true
+ },
+ {
+ "name": "Matthew Aitken",
+ "url": "https://github.com/KhafraDev",
+ "author": true
+ },
+ {
+ "name": "Robert Nagy",
+ "url": "https://github.com/ronag",
+ "author": true
+ },
+ {
+ "name": "Szymon Marczak",
+ "url": "https://github.com/szmarczak",
+ "author": true
+ },
+ {
+ "name": "Tomas Della Vedova",
+ "url": "https://github.com/delvedor",
+ "author": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/deps/undici/undici.js b/deps/undici/undici.js
index bea59e571c8772..cd6308f9f3cc2d 100644
--- a/deps/undici/undici.js
+++ b/deps/undici/undici.js
@@ -60,7 +60,13 @@ var require_symbols = __commonJS({
kProxy: Symbol("proxy agent options"),
kCounter: Symbol("socket request counter"),
kInterceptors: Symbol("dispatch interceptors"),
- kMaxResponseSize: Symbol("max response size")
+ kMaxResponseSize: Symbol("max response size"),
+ kHTTP2Session: Symbol("http2Session"),
+ kHTTP2SessionState: Symbol("http2Session state"),
+ kHTTP2BuildRequest: Symbol("http2 build request"),
+ kHTTP1BuildRequest: Symbol("http1 build request"),
+ kHTTP2CopyHeaders: Symbol("http2 copy headers"),
+ kHTTPConnVersion: Symbol("http connection version")
};
}
});
@@ -292,7 +298,7 @@ var require_util = __commonJS({
var stream = require("stream");
var net = require("net");
var { InvalidArgumentError } = require_errors();
- var { Blob } = require("buffer");
+ var { Blob: Blob2 } = require("buffer");
var nodeUtil = require("util");
var { stringify } = require("querystring");
var [nodeMajor, nodeMinor] = process.versions.node.split(".").map((v) => Number(v));
@@ -302,7 +308,7 @@ var require_util = __commonJS({
return obj && typeof obj === "object" && typeof obj.pipe === "function" && typeof obj.on === "function";
}
function isBlobLike(object) {
- return Blob && object instanceof Blob || object && typeof object === "object" && (typeof object.stream === "function" || typeof object.arrayBuffer === "function") && /^(Blob|File)$/.test(object[Symbol.toStringTag]);
+ return Blob2 && object instanceof Blob2 || object && typeof object === "object" && (typeof object.stream === "function" || typeof object.arrayBuffer === "function") && /^(Blob|File)$/.test(object[Symbol.toStringTag]);
}
function buildURL(url, queryParams) {
if (url.includes("?") || url.includes("#")) {
@@ -400,7 +406,7 @@ var require_util = __commonJS({
return 0;
} else if (isStream(body)) {
const state = body._readableState;
- return state && state.ended === true && Number.isFinite(state.length) ? state.length : null;
+ return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) ? state.length : null;
} else if (isBlobLike(body)) {
return body.size != null ? body.size : null;
} else if (isBuffer(body)) {
@@ -439,6 +445,8 @@ var require_util = __commonJS({
return m ? parseInt(m[1], 10) * 1e3 : null;
}
function parseHeaders(headers, obj = {}) {
+ if (!Array.isArray(headers))
+ return headers;
for (let i = 0; i < headers.length; i += 2) {
const key = headers[i].toString().toLowerCase();
let val = obj[key];
@@ -535,13 +543,18 @@ var require_util = __commonJS({
bytesRead: socket.bytesRead
};
}
+ async function* convertIterableToBuffer(iterable) {
+ for await (const chunk of iterable) {
+ yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
+ }
+ }
var ReadableStream;
function ReadableStreamFrom(iterable) {
if (!ReadableStream) {
ReadableStream = require("stream/web").ReadableStream;
}
if (ReadableStream.from) {
- return ReadableStream.from(iterable);
+ return ReadableStream.from(convertIterableToBuffer(iterable));
}
let iterator;
return new ReadableStream({
@@ -1139,11 +1152,24 @@ var require_util2 = __commonJS({
const metadata = list.filter((item) => item.algo === strongest);
for (const item of metadata) {
const algorithm = item.algo;
- const expectedValue = item.hash;
- const actualValue = crypto.createHash(algorithm).update(bytes).digest("base64");
+ let expectedValue = item.hash;
+ if (expectedValue.endsWith("==")) {
+ expectedValue = expectedValue.slice(0, -2);
+ }
+ let actualValue = crypto.createHash(algorithm).update(bytes).digest("base64");
+ if (actualValue.endsWith("==")) {
+ actualValue = actualValue.slice(0, -2);
+ }
if (actualValue === expectedValue) {
return true;
}
+ let actualBase64URL = crypto.createHash(algorithm).update(bytes).digest("base64url");
+ if (actualBase64URL.endsWith("==")) {
+ actualBase64URL = actualBase64URL.slice(0, -2);
+ }
+ if (actualBase64URL === expectedValue) {
+ return true;
+ }
}
return false;
}
@@ -1250,9 +1276,9 @@ var require_util2 = __commonJS({
}
return { value: result, done: false };
}
- function fullyReadBody(body, processBody, processBodyError) {
- const successSteps = (bytes) => queueMicrotask(() => processBody(bytes));
- const errorSteps = (error) => queueMicrotask(() => processBodyError(error));
+ async function fullyReadBody(body, processBody, processBodyError) {
+ const successSteps = processBody;
+ const errorSteps = processBodyError;
let reader;
try {
reader = body.stream.getReader();
@@ -1260,7 +1286,12 @@ var require_util2 = __commonJS({
errorSteps(e);
return;
}
- readAllBytes(reader, successSteps, errorSteps);
+ try {
+ const result = await readAllBytes(reader);
+ successSteps(result);
+ } catch (e) {
+ errorSteps(e);
+ }
}
var ReadableStream = globalThis.ReadableStream;
function isReadableStreamLike(stream) {
@@ -1291,25 +1322,16 @@ var require_util2 = __commonJS({
}
return input;
}
- async function readAllBytes(reader, successSteps, failureSteps) {
+ async function readAllBytes(reader) {
const bytes = [];
let byteLength = 0;
while (true) {
- let done;
- let chunk;
- try {
- ({ done, value: chunk } = await reader.read());
- } catch (e) {
- failureSteps(e);
- return;
- }
+ const { done, value: chunk } = await reader.read();
if (done) {
- successSteps(Buffer.concat(bytes, byteLength));
- return;
+ return Buffer.concat(bytes, byteLength);
}
if (!isUint8Array(chunk)) {
- failureSteps(new TypeError("Received non-Uint8Array chunk"));
- return;
+ throw new TypeError("Received non-Uint8Array chunk");
}
bytes.push(chunk);
byteLength += chunk.length;
@@ -6015,14 +6037,14 @@ var require_dataURL = __commonJS({
var require_file = __commonJS({
"lib/fetch/file.js"(exports2, module2) {
"use strict";
- var { Blob, File: NativeFile } = require("buffer");
+ var { Blob: Blob2, File: NativeFile } = require("buffer");
var { types } = require("util");
var { kState } = require_symbols2();
var { isBlobLike } = require_util2();
var { webidl } = require_webidl();
var { parseMIMEType, serializeAMimeType } = require_dataURL();
var { kEnumerableProperty } = require_util();
- var File = class extends Blob {
+ var File = class extends Blob2 {
constructor(fileBits, fileName, options = {}) {
webidl.argumentLengthCheck(arguments, 2, { header: "File constructor" });
fileBits = webidl.converters["sequence"](fileBits);
@@ -6118,7 +6140,7 @@ var require_file = __commonJS({
name: kEnumerableProperty,
lastModified: kEnumerableProperty
});
- webidl.converters.Blob = webidl.interfaceConverter(Blob);
+ webidl.converters.Blob = webidl.interfaceConverter(Blob2);
webidl.converters.BlobPart = function(V, opts) {
if (webidl.util.Type(V) === "Object") {
if (isBlobLike(V)) {
@@ -6200,7 +6222,7 @@ var require_formdata = __commonJS({
var { kState } = require_symbols2();
var { File: UndiciFile, FileLike, isFileLike } = require_file();
var { webidl } = require_webidl();
- var { Blob, File: NativeFile } = require("buffer");
+ var { Blob: Blob2, File: NativeFile } = require("buffer");
var File = NativeFile ?? UndiciFile;
var FormData = class {
constructor(form) {
@@ -6310,7 +6332,7 @@ var require_formdata = __commonJS({
value = Buffer.from(value).toString("utf8");
} else {
if (!isFileLike(value)) {
- value = value instanceof Blob ? new File([value], "blob", { type: value.type }) : new FileLike(value, "blob", { type: value.type });
+ value = value instanceof Blob2 ? new File([value], "blob", { type: value.type }) : new FileLike(value, "blob", { type: value.type });
}
if (filename !== void 0) {
const options = {
@@ -6344,7 +6366,7 @@ var require_body = __commonJS({
var { kState } = require_symbols2();
var { webidl } = require_webidl();
var { DOMException, structuredClone } = require_constants();
- var { Blob, File: NativeFile } = require("buffer");
+ var { Blob: Blob2, File: NativeFile } = require("buffer");
var { kBodyUsed } = require_symbols();
var assert = require("assert");
var { isErrored } = require_util();
@@ -6536,7 +6558,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
} else if (mimeType) {
mimeType = serializeAMimeType(mimeType);
}
- return new Blob([bytes], { type: mimeType });
+ return new Blob2([bytes], { type: mimeType });
}, instance);
},
arrayBuffer() {
@@ -6563,6 +6585,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
try {
busboy = Busboy({
headers,
+ preservePath: true,
defParamCharset: "utf8"
});
} catch (err) {
@@ -6660,7 +6683,7 @@ Content-Type: ${value.type || "application/octet-stream"}\r
successSteps(new Uint8Array());
return promise.promise;
}
- fullyReadBody(object[kState].body, successSteps, errorSteps);
+ await fullyReadBody(object[kState].body, successSteps, errorSteps);
return promise.promise;
}
function bodyUnusable(body) {
@@ -6738,7 +6761,7 @@ var require_response = __commonJS({
responseObject[kHeaders][kRealm] = relevantRealm;
return responseObject;
}
- static json(data = void 0, init = {}) {
+ static json(data, init = {}) {
webidl.argumentLengthCheck(arguments, 1, { header: "Response.json" });
if (init !== null) {
init = webidl.converters.ResponseInit(init);
@@ -6959,9 +6982,9 @@ var require_response = __commonJS({
assert(false);
}
}
- function makeAppropriateNetworkError(fetchParams) {
+ function makeAppropriateNetworkError(fetchParams, err = null) {
assert(isCancelled(fetchParams));
- return isAborted(fetchParams) ? makeNetworkError(new DOMException("The operation was aborted.", "AbortError")) : makeNetworkError("Request was cancelled.");
+ return isAborted(fetchParams) ? makeNetworkError(Object.assign(new DOMException("The operation was aborted.", "AbortError"), { cause: err })) : makeNetworkError(Object.assign(new DOMException("Request was cancelled."), { cause: err }));
}
function initializeResponse(response, init, body) {
if (init.status !== null && (init.status < 200 || init.status > 599)) {
@@ -8140,6 +8163,7 @@ var require_request2 = __commonJS({
NotSupportedError
} = require_errors();
var assert = require("assert");
+ var { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = require_symbols();
var util = require_util();
var tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/;
var headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/;
@@ -8174,7 +8198,8 @@ var require_request2 = __commonJS({
headersTimeout,
bodyTimeout,
reset,
- throwOnError
+ throwOnError,
+ expectContinue
}, handler) {
if (typeof path !== "string") {
throw new InvalidArgumentError("path must be a string");
@@ -8200,6 +8225,9 @@ var require_request2 = __commonJS({
if (reset != null && typeof reset !== "boolean") {
throw new InvalidArgumentError("invalid reset");
}
+ if (expectContinue != null && typeof expectContinue !== "boolean") {
+ throw new InvalidArgumentError("invalid expectContinue");
+ }
this.headersTimeout = headersTimeout;
this.bodyTimeout = bodyTimeout;
this.throwOnError = throwOnError === true;
@@ -8233,6 +8261,7 @@ var require_request2 = __commonJS({
this.contentLength = null;
this.contentType = null;
this.headers = "";
+ this.expectContinue = expectContinue != null ? expectContinue : false;
if (Array.isArray(headers)) {
if (headers.length % 2 !== 0) {
throw new InvalidArgumentError("headers array must be even");
@@ -8335,8 +8364,48 @@ var require_request2 = __commonJS({
processHeader(this, key, value);
return this;
}
+ static [kHTTP1BuildRequest](origin, opts, handler) {
+ return new Request(origin, opts, handler);
+ }
+ static [kHTTP2BuildRequest](origin, opts, handler) {
+ const headers = opts.headers;
+ opts = { ...opts, headers: null };
+ const request = new Request(origin, opts, handler);
+ request.headers = {};
+ if (Array.isArray(headers)) {
+ if (headers.length % 2 !== 0) {
+ throw new InvalidArgumentError("headers array must be even");
+ }
+ for (let i = 0; i < headers.length; i += 2) {
+ processHeader(request, headers[i], headers[i + 1], true);
+ }
+ } else if (headers && typeof headers === "object") {
+ const keys = Object.keys(headers);
+ for (let i = 0; i < keys.length; i++) {
+ const key = keys[i];
+ processHeader(request, key, headers[key], true);
+ }
+ } else if (headers != null) {
+ throw new InvalidArgumentError("headers must be an object or an array");
+ }
+ return request;
+ }
+ static [kHTTP2CopyHeaders](raw) {
+ const rawHeaders = raw.split("\r\n");
+ const headers = {};
+ for (const header of rawHeaders) {
+ const [key, value] = header.split(": ");
+ if (value == null || value.length === 0)
+ continue;
+ if (headers[key])
+ headers[key] += `,${value}`;
+ else
+ headers[key] = value;
+ }
+ return headers;
+ }
};
- function processHeaderValue(key, val) {
+ function processHeaderValue(key, val, skipAppend) {
if (val && typeof val === "object") {
throw new InvalidArgumentError(`invalid ${key} header`);
}
@@ -8344,10 +8413,10 @@ var require_request2 = __commonJS({
if (headerCharRegex.exec(val) !== null) {
throw new InvalidArgumentError(`invalid ${key} header`);
}
- return `${key}: ${val}\r
+ return skipAppend ? val : `${key}: ${val}\r
`;
}
- function processHeader(request, key, val) {
+ function processHeader(request, key, val, skipAppend = false) {
if (val && (typeof val === "object" && !Array.isArray(val))) {
throw new InvalidArgumentError(`invalid ${key} header`);
} else if (val === void 0) {
@@ -8386,10 +8455,20 @@ var require_request2 = __commonJS({
} else {
if (Array.isArray(val)) {
for (let i = 0; i < val.length; i++) {
- request.headers += processHeaderValue(key, val[i]);
+ if (skipAppend) {
+ if (request.headers[key])
+ request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}`;
+ else
+ request.headers[key] = processHeaderValue(key, val[i], skipAppend);
+ } else {
+ request.headers += processHeaderValue(key, val[i]);
+ }
}
} else {
- request.headers += processHeaderValue(key, val);
+ if (skipAppend)
+ request.headers[key] = processHeaderValue(key, val, skipAppend);
+ else
+ request.headers += processHeaderValue(key, val);
}
}
}
@@ -8455,13 +8534,14 @@ var require_connect = __commonJS({
}
};
}
- function buildConnector({ maxCachedSessions, socketPath, timeout, ...opts }) {
+ function buildConnector({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) {
if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) {
throw new InvalidArgumentError("maxCachedSessions must be a positive integer or zero");
}
const options = { path: socketPath, ...opts };
const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions);
timeout = timeout == null ? 1e4 : timeout;
+ allowH2 = allowH2 != null ? allowH2 : false;
return function connect({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) {
let socket;
if (protocol === "https:") {
@@ -8478,6 +8558,7 @@ var require_connect = __commonJS({
servername,
session,
localAddress,
+ ALPNProtocols: allowH2 ? ["http/1.1", "h2"] : ["http/1.1"],
socket: httpSocket,
port: port || 443,
host: hostname
@@ -9068,6 +9149,7 @@ var require_client = __commonJS({
"use strict";
var assert = require("assert");
var net = require("net");
+ var { pipeline } = require("stream");
var util = require_util();
var timers = require_timers();
var Request = require_request2();
@@ -9129,8 +9211,32 @@ var require_client = __commonJS({
kDispatch,
kInterceptors,
kLocalAddress,
- kMaxResponseSize
+ kMaxResponseSize,
+ kHTTPConnVersion,
+ kHost,
+ kHTTP2Session,
+ kHTTP2SessionState,
+ kHTTP2BuildRequest,
+ kHTTP2CopyHeaders,
+ kHTTP1BuildRequest
} = require_symbols();
+ var http2;
+ try {
+ http2 = require("http2");
+ } catch {
+ http2 = { constants: {} };
+ }
+ var {
+ constants: {
+ HTTP2_HEADER_AUTHORITY,
+ HTTP2_HEADER_METHOD,
+ HTTP2_HEADER_PATH,
+ HTTP2_HEADER_CONTENT_LENGTH,
+ HTTP2_HEADER_EXPECT,
+ HTTP2_HEADER_STATUS
+ }
+ } = http2;
+ var h2ExperimentalWarned = false;
var FastBuffer = Buffer[Symbol.species];
var kClosedResolve = Symbol("kClosedResolve");
var channels = {};
@@ -9172,7 +9278,9 @@ var require_client = __commonJS({
localAddress,
maxResponseSize,
autoSelectFamily,
- autoSelectFamilyAttemptTimeout
+ autoSelectFamilyAttemptTimeout,
+ allowH2,
+ maxConcurrentStreams
} = {}) {
super();
if (keepAlive !== void 0) {
@@ -9232,10 +9340,17 @@ var require_client = __commonJS({
if (autoSelectFamilyAttemptTimeout != null && (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1)) {
throw new InvalidArgumentError("autoSelectFamilyAttemptTimeout must be a positive number");
}
+ if (allowH2 != null && typeof allowH2 !== "boolean") {
+ throw new InvalidArgumentError("allowH2 must be a valid boolean value");
+ }
+ if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== "number" || maxConcurrentStreams < 1)) {
+ throw new InvalidArgumentError("maxConcurrentStreams must be a possitive integer, greater than 0");
+ }
if (typeof connect2 !== "function") {
connect2 = buildConnector({
...tls,
maxCachedSessions,
+ allowH2,
socketPath,
timeout: connectTimeout,
...util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : void 0,
@@ -9265,6 +9380,13 @@ var require_client = __commonJS({
this[kMaxRequests] = maxRequestsPerClient;
this[kClosedResolve] = null;
this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1;
+ this[kHTTPConnVersion] = "h1";
+ this[kHTTP2Session] = null;
+ this[kHTTP2SessionState] = !allowH2 ? null : {
+ openStreams: 0,
+ maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100
+ };
+ this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ""}`;
this[kQueue] = [];
this[kRunningIdx] = 0;
this[kPendingIdx] = 0;
@@ -9298,7 +9420,7 @@ var require_client = __commonJS({
}
[kDispatch](opts, handler) {
const origin = opts.origin || this[kUrl].origin;
- const request = new Request(origin, opts, handler);
+ const request = this[kHTTPConnVersion] === "h2" ? Request[kHTTP2BuildRequest](origin, opts, handler) : Request[kHTTP1BuildRequest](origin, opts, handler);
this[kQueue].push(request);
if (this[kResuming]) {
} else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) {
@@ -9335,6 +9457,11 @@ var require_client = __commonJS({
}
resolve();
};
+ if (this[kHTTP2Session] != null) {
+ util.destroy(this[kHTTP2Session], err);
+ this[kHTTP2Session] = null;
+ this[kHTTP2SessionState] = null;
+ }
if (!this[kSocket]) {
queueMicrotask(callback);
} else {
@@ -9344,6 +9471,44 @@ var require_client = __commonJS({
});
}
};
+ function onHttp2SessionError(err) {
+ assert(err.code !== "ERR_TLS_CERT_ALTNAME_INVALID");
+ this[kSocket][kError] = err;
+ onError(this[kClient], err);
+ }
+ function onHttp2FrameError(type, code, id) {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`);
+ if (id === 0) {
+ this[kSocket][kError] = err;
+ onError(this[kClient], err);
+ }
+ }
+ function onHttp2SessionEnd() {
+ util.destroy(this, new SocketError("other side closed"));
+ util.destroy(this[kSocket], new SocketError("other side closed"));
+ }
+ function onHTTP2GoAway(code) {
+ const client = this[kClient];
+ const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`);
+ client[kSocket] = null;
+ client[kHTTP2Session] = null;
+ if (client.destroyed) {
+ assert(this[kPending] === 0);
+ const requests = client[kQueue].splice(client[kRunningIdx]);
+ for (let i = 0; i < requests.length; i++) {
+ const request = requests[i];
+ errorRequest(this, request, err);
+ }
+ } else if (client[kRunning] > 0) {
+ const request = client[kQueue][client[kRunningIdx]];
+ client[kQueue][client[kRunningIdx]++] = null;
+ errorRequest(client, request, err);
+ }
+ client[kPendingIdx] = client[kRunningIdx];
+ assert(client[kRunning] === 0);
+ client.emit("disconnect", client[kUrl], [client], err);
+ resume(client);
+ }
var constants = require_constants2();
var createRedirectInterceptor = require_redirectInterceptor();
var EMPTY_BUF = Buffer.alloc(0);
@@ -9783,11 +9948,13 @@ var require_client = __commonJS({
parser.readMore();
}
function onSocketError(err) {
- const { [kParser]: parser } = this;
+ const { [kClient]: client, [kParser]: parser } = this;
assert(err.code !== "ERR_TLS_CERT_ALTNAME_INVALID");
- if (err.code === "ECONNRESET" && parser.statusCode && !parser.shouldKeepAlive) {
- parser.onMessageComplete();
- return;
+ if (client[kHTTPConnVersion] !== "h2") {
+ if (err.code === "ECONNRESET" && parser.statusCode && !parser.shouldKeepAlive) {
+ parser.onMessageComplete();
+ return;
+ }
}
this[kError] = err;
onError(this[kClient], err);
@@ -9804,20 +9971,24 @@ var require_client = __commonJS({
}
}
function onSocketEnd() {
- const { [kParser]: parser } = this;
- if (parser.statusCode && !parser.shouldKeepAlive) {
- parser.onMessageComplete();
- return;
+ const { [kParser]: parser, [kClient]: client } = this;
+ if (client[kHTTPConnVersion] !== "h2") {
+ if (parser.statusCode && !parser.shouldKeepAlive) {
+ parser.onMessageComplete();
+ return;
+ }
}
util.destroy(this, new SocketError("other side closed", util.getSocketInfo(this)));
}
function onSocketClose() {
- const { [kClient]: client } = this;
- if (!this[kError] && this[kParser].statusCode && !this[kParser].shouldKeepAlive) {
- this[kParser].onMessageComplete();
+ const { [kClient]: client, [kParser]: parser } = this;
+ if (client[kHTTPConnVersion] === "h1" && parser) {
+ if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {
+ parser.onMessageComplete();
+ }
+ this[kParser].destroy();
+ this[kParser] = null;
}
- this[kParser].destroy();
- this[kParser] = null;
const err = this[kError] || new SocketError("closed", util.getSocketInfo(this));
client[kSocket] = null;
if (client.destroyed) {
@@ -9884,21 +10055,46 @@ var require_client = __commonJS({
}), new ClientDestroyedError());
return;
}
- if (!llhttpInstance) {
- llhttpInstance = await llhttpPromise;
- llhttpPromise = null;
- }
client[kConnecting] = false;
assert(socket);
- socket[kNoRef] = false;
- socket[kWriting] = false;
- socket[kReset] = false;
- socket[kBlocking] = false;
- socket[kError] = null;
- socket[kParser] = new Parser(client, socket, llhttpInstance);
- socket[kClient] = client;
+ const isH2 = socket.alpnProtocol === "h2";
+ if (isH2) {
+ if (!h2ExperimentalWarned) {
+ h2ExperimentalWarned = true;
+ process.emitWarning("H2 support is experimental, expect them to change at any time.", {
+ code: "UNDICI-H2"
+ });
+ }
+ const session = http2.connect(client[kUrl], {
+ createConnection: () => socket,
+ peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams
+ });
+ client[kHTTPConnVersion] = "h2";
+ session[kClient] = client;
+ session[kSocket] = socket;
+ session.on("error", onHttp2SessionError);
+ session.on("frameError", onHttp2FrameError);
+ session.on("end", onHttp2SessionEnd);
+ session.on("goaway", onHTTP2GoAway);
+ session.on("close", onSocketClose);
+ session.unref();
+ client[kHTTP2Session] = session;
+ socket[kHTTP2Session] = session;
+ } else {
+ if (!llhttpInstance) {
+ llhttpInstance = await llhttpPromise;
+ llhttpPromise = null;
+ }
+ socket[kNoRef] = false;
+ socket[kWriting] = false;
+ socket[kReset] = false;
+ socket[kBlocking] = false;
+ socket[kParser] = new Parser(client, socket, llhttpInstance);
+ }
socket[kCounter] = 0;
socket[kMaxRequests] = client[kMaxRequests];
+ socket[kClient] = client;
+ socket[kError] = null;
socket.on("error", onSocketError).on("readable", onSocketReadable).on("end", onSocketEnd).on("close", onSocketClose);
client[kSocket] = socket;
if (channels.connected.hasSubscribers) {
@@ -9977,7 +10173,7 @@ var require_client = __commonJS({
return;
}
const socket = client[kSocket];
- if (socket && !socket.destroyed) {
+ if (socket && !socket.destroyed && socket.alpnProtocol !== "h2") {
if (client[kSize] === 0) {
if (!socket[kNoRef] && socket.unref) {
socket.unref();
@@ -10030,7 +10226,7 @@ var require_client = __commonJS({
if (client[kConnecting]) {
return;
}
- if (!socket) {
+ if (!socket && !client[kHTTP2Session]) {
connect(client);
return;
}
@@ -10064,6 +10260,10 @@ var require_client = __commonJS({
}
}
function write(client, request) {
+ if (client[kHTTPConnVersion] === "h2") {
+ writeH2(client, client[kHTTP2Session], request);
+ return;
+ }
const { body, method, path, host, upgrade, headers, blocking, reset } = request;
const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
if (body && typeof body.read === "function") {
@@ -10175,8 +10375,205 @@ upgrade: ${upgrade}\r
}
return true;
}
- function writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) {
+ function writeH2(client, session, request) {
+ const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request;
+ let headers;
+ if (typeof reqHeaders === "string")
+ headers = Request[kHTTP2CopyHeaders](reqHeaders.trim());
+ else
+ headers = reqHeaders;
+ if (upgrade) {
+ errorRequest(client, request, new Error("Upgrade not supported for H2"));
+ return false;
+ }
+ try {
+ request.onConnect((err) => {
+ if (request.aborted || request.completed) {
+ return;
+ }
+ errorRequest(client, request, err || new RequestAbortedError());
+ });
+ } catch (err) {
+ errorRequest(client, request, err);
+ }
+ if (request.aborted) {
+ return false;
+ }
+ let stream;
+ const h2State = client[kHTTP2SessionState];
+ headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost];
+ headers[HTTP2_HEADER_PATH] = path;
+ if (method === "CONNECT") {
+ session.ref();
+ stream = session.request(headers, { endStream: false, signal });
+ if (stream.id && !stream.pending) {
+ request.onUpgrade(null, null, stream);
+ ++h2State.openStreams;
+ } else {
+ stream.once("ready", () => {
+ request.onUpgrade(null, null, stream);
+ ++h2State.openStreams;
+ });
+ }
+ stream.once("close", () => {
+ h2State.openStreams -= 1;
+ if (h2State.openStreams === 0)
+ session.unref();
+ });
+ return true;
+ } else {
+ headers[HTTP2_HEADER_METHOD] = method;
+ }
+ const expectsPayload = method === "PUT" || method === "POST" || method === "PATCH";
+ if (body && typeof body.read === "function") {
+ body.read(0);
+ }
+ let contentLength = util.bodyLength(body);
+ if (contentLength == null) {
+ contentLength = request.contentLength;
+ }
+ if (contentLength === 0 || !expectsPayload) {
+ contentLength = null;
+ }
+ if (request.contentLength != null && request.contentLength !== contentLength) {
+ if (client[kStrictContentLength]) {
+ errorRequest(client, request, new RequestContentLengthMismatchError());
+ return false;
+ }
+ process.emitWarning(new RequestContentLengthMismatchError());
+ }
+ if (contentLength != null) {
+ assert(body, "no body must not have content length");
+ headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}`;
+ }
+ session.ref();
+ const shouldEndStream = method === "GET" || method === "HEAD";
+ if (expectContinue) {
+ headers[HTTP2_HEADER_EXPECT] = "100-continue";
+ stream = session.request(headers, { endStream: shouldEndStream, signal });
+ stream.once("continue", writeBodyH2);
+ } else {
+ stream = session.request(headers, {
+ endStream: shouldEndStream,
+ signal
+ });
+ writeBodyH2();
+ }
+ ++h2State.openStreams;
+ stream.once("response", (headers2) => {
+ if (request.onHeaders(Number(headers2[HTTP2_HEADER_STATUS]), headers2, stream.resume.bind(stream), "") === false) {
+ stream.pause();
+ }
+ });
+ stream.once("end", () => {
+ request.onComplete([]);
+ });
+ stream.on("data", (chunk) => {
+ if (request.onData(chunk) === false)
+ stream.pause();
+ });
+ stream.once("close", () => {
+ h2State.openStreams -= 1;
+ if (h2State.openStreams === 0)
+ session.unref();
+ });
+ stream.once("error", function(err) {
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1;
+ util.destroy(stream, err);
+ }
+ });
+ stream.once("frameError", (type, code) => {
+ const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`);
+ errorRequest(client, request, err);
+ if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) {
+ h2State.streams -= 1;
+ util.destroy(stream, err);
+ }
+ });
+ return true;
+ function writeBodyH2() {
+ if (!body) {
+ request.onRequestSent();
+ } else if (util.isBuffer(body)) {
+ assert(contentLength === body.byteLength, "buffer body must have content length");
+ stream.cork();
+ stream.write(body);
+ stream.uncork();
+ request.onBodySent(body);
+ request.onRequestSent();
+ } else if (util.isBlobLike(body)) {
+ if (typeof body.stream === "function") {
+ writeIterable({
+ client,
+ request,
+ contentLength,
+ h2stream: stream,
+ expectsPayload,
+ body: body.stream(),
+ socket: client[kSocket],
+ header: ""
+ });
+ } else {
+ writeBlob({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ h2stream: stream,
+ header: "",
+ socket: client[kSocket]
+ });
+ }
+ } else if (util.isStream(body)) {
+ writeStream({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ socket: client[kSocket],
+ h2stream: stream,
+ header: ""
+ });
+ } else if (util.isIterable(body)) {
+ writeIterable({
+ body,
+ client,
+ request,
+ contentLength,
+ expectsPayload,
+ header: "",
+ h2stream: stream,
+ socket: client[kSocket]
+ });
+ } else {
+ assert(false);
+ }
+ }
+ }
+ function writeStream({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "stream body cannot be pipelined");
+ if (client[kHTTPConnVersion] === "h2") {
+ let onPipeData = function(chunk) {
+ request.onBodySent(chunk);
+ };
+ const pipe = pipeline(body, h2stream, (err) => {
+ if (err) {
+ util.destroy(body, err);
+ util.destroy(h2stream, err);
+ } else {
+ request.onRequestSent();
+ }
+ });
+ pipe.on("data", onPipeData);
+ pipe.once("end", () => {
+ pipe.removeListener("data", onPipeData);
+ util.destroy(pipe);
+ });
+ return;
+ }
let finished = false;
const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header });
const onData = function(chunk) {
@@ -10230,19 +10627,26 @@ upgrade: ${upgrade}\r
}
socket.on("drain", onDrain).on("error", onFinished);
}
- async function writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) {
+ async function writeBlob({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
assert(contentLength === body.size, "blob body must have content length");
+ const isH2 = client[kHTTPConnVersion] === "h2";
try {
if (contentLength != null && contentLength !== body.size) {
throw new RequestContentLengthMismatchError();
}
const buffer = Buffer.from(await body.arrayBuffer());
- socket.cork();
- socket.write(`${header}content-length: ${contentLength}\r
+ if (isH2) {
+ h2stream.cork();
+ h2stream.write(buffer);
+ h2stream.uncork();
+ } else {
+ socket.cork();
+ socket.write(`${header}content-length: ${contentLength}\r
\r
`, "latin1");
- socket.write(buffer);
- socket.uncork();
+ socket.write(buffer);
+ socket.uncork();
+ }
request.onBodySent(buffer);
request.onRequestSent();
if (!expectsPayload) {
@@ -10250,10 +10654,10 @@ upgrade: ${upgrade}\r
}
resume(client);
} catch (err) {
- util.destroy(socket, err);
+ util.destroy(isH2 ? h2stream : socket, err);
}
}
- async function writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) {
+ async function writeIterable({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) {
assert(contentLength !== 0 || client[kRunning] === 0, "iterator body cannot be pipelined");
let callback = null;
function onDrain() {
@@ -10271,6 +10675,24 @@ upgrade: ${upgrade}\r
callback = resolve;
}
});
+ if (client[kHTTPConnVersion] === "h2") {
+ h2stream.on("close", onDrain).on("drain", onDrain);
+ try {
+ for await (const chunk of body) {
+ if (socket[kError]) {
+ throw socket[kError];
+ }
+ if (!h2stream.write(chunk)) {
+ await waitForDrain();
+ }
+ }
+ } catch (err) {
+ h2stream.destroy(err);
+ } finally {
+ h2stream.off("close", onDrain).off("drain", onDrain);
+ }
+ return;
+ }
socket.on("close", onDrain).on("drain", onDrain);
const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header });
try {
@@ -10442,6 +10864,7 @@ var require_pool = __commonJS({
socketPath,
autoSelectFamily,
autoSelectFamilyAttemptTimeout,
+ allowH2,
...options
} = {}) {
super();
@@ -10458,6 +10881,7 @@ var require_pool = __commonJS({
connect = buildConnector({
...tls,
maxCachedSessions,
+ allowH2,
socketPath,
timeout: connectTimeout == null ? 1e4 : connectTimeout,
...util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : void 0,
@@ -10467,7 +10891,7 @@ var require_pool = __commonJS({
this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) ? options.interceptors.Pool : [];
this[kConnections] = connections || null;
this[kUrl] = util.parseOrigin(origin);
- this[kOptions] = { ...util.deepClone(options), connect };
+ this[kOptions] = { ...util.deepClone(options), connect, allowH2 };
this[kOptions].interceptors = options.interceptors ? { ...options.interceptors } : void 0;
this[kFactory] = factory;
}
@@ -11383,7 +11807,7 @@ var require_fetch = __commonJS({
} catch (err) {
if (err.name === "AbortError") {
fetchParams.controller.connection.destroy();
- return makeAppropriateNetworkError(fetchParams);
+ return makeAppropriateNetworkError(fetchParams, err);
}
return makeNetworkError(err);
}
@@ -11498,15 +11922,28 @@ var require_fetch = __commonJS({
let codings = [];
let location = "";
const headers = new Headers();
- for (let n = 0; n < headersList.length; n += 2) {
- const key = headersList[n + 0].toString("latin1");
- const val = headersList[n + 1].toString("latin1");
- if (key.toLowerCase() === "content-encoding") {
- codings = val.toLowerCase().split(",").map((x) => x.trim()).reverse();
- } else if (key.toLowerCase() === "location") {
- location = val;
+ if (Array.isArray(headersList)) {
+ for (let n = 0; n < headersList.length; n += 2) {
+ const key = headersList[n + 0].toString("latin1");
+ const val = headersList[n + 1].toString("latin1");
+ if (key.toLowerCase() === "content-encoding") {
+ codings = val.toLowerCase().split(",").map((x) => x.trim());
+ } else if (key.toLowerCase() === "location") {
+ location = val;
+ }
+ headers.append(key, val);
+ }
+ } else {
+ const keys = Object.keys(headersList);
+ for (const key of keys) {
+ const val = headersList[key];
+ if (key.toLowerCase() === "content-encoding") {
+ codings = val.toLowerCase().split(",").map((x) => x.trim()).reverse();
+ } else if (key.toLowerCase() === "location") {
+ location = val;
+ }
+ headers.append(key, val);
}
- headers.append(key, val);
}
this.body = new Readable({ read: resume });
const decoders = [];
@@ -11591,11 +12028,1194 @@ var require_fetch = __commonJS({
}
});
+// lib/websocket/constants.js
+var require_constants3 = __commonJS({
+ "lib/websocket/constants.js"(exports2, module2) {
+ "use strict";
+ var uid = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
+ var staticPropertyDescriptors = {
+ enumerable: true,
+ writable: false,
+ configurable: false
+ };
+ var states = {
+ CONNECTING: 0,
+ OPEN: 1,
+ CLOSING: 2,
+ CLOSED: 3
+ };
+ var opcodes = {
+ CONTINUATION: 0,
+ TEXT: 1,
+ BINARY: 2,
+ CLOSE: 8,
+ PING: 9,
+ PONG: 10
+ };
+ var maxUnsigned16Bit = 2 ** 16 - 1;
+ var parserStates = {
+ INFO: 0,
+ PAYLOADLENGTH_16: 2,
+ PAYLOADLENGTH_64: 3,
+ READ_DATA: 4
+ };
+ var emptyBuffer = Buffer.allocUnsafe(0);
+ module2.exports = {
+ uid,
+ staticPropertyDescriptors,
+ states,
+ opcodes,
+ maxUnsigned16Bit,
+ parserStates,
+ emptyBuffer
+ };
+ }
+});
+
+// lib/websocket/symbols.js
+var require_symbols3 = __commonJS({
+ "lib/websocket/symbols.js"(exports2, module2) {
+ "use strict";
+ module2.exports = {
+ kWebSocketURL: Symbol("url"),
+ kReadyState: Symbol("ready state"),
+ kController: Symbol("controller"),
+ kResponse: Symbol("response"),
+ kBinaryType: Symbol("binary type"),
+ kSentClose: Symbol("sent close"),
+ kReceivedClose: Symbol("received close"),
+ kByteParser: Symbol("byte parser")
+ };
+ }
+});
+
+// lib/websocket/events.js
+var require_events = __commonJS({
+ "lib/websocket/events.js"(exports2, module2) {
+ "use strict";
+ var { webidl } = require_webidl();
+ var { kEnumerableProperty } = require_util();
+ var { MessagePort } = require("worker_threads");
+ var MessageEvent = class extends Event {
+ #eventInit;
+ constructor(type, eventInitDict = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: "MessageEvent constructor" });
+ type = webidl.converters.DOMString(type);
+ eventInitDict = webidl.converters.MessageEventInit(eventInitDict);
+ super(type, eventInitDict);
+ this.#eventInit = eventInitDict;
+ }
+ get data() {
+ webidl.brandCheck(this, MessageEvent);
+ return this.#eventInit.data;
+ }
+ get origin() {
+ webidl.brandCheck(this, MessageEvent);
+ return this.#eventInit.origin;
+ }
+ get lastEventId() {
+ webidl.brandCheck(this, MessageEvent);
+ return this.#eventInit.lastEventId;
+ }
+ get source() {
+ webidl.brandCheck(this, MessageEvent);
+ return this.#eventInit.source;
+ }
+ get ports() {
+ webidl.brandCheck(this, MessageEvent);
+ if (!Object.isFrozen(this.#eventInit.ports)) {
+ Object.freeze(this.#eventInit.ports);
+ }
+ return this.#eventInit.ports;
+ }
+ initMessageEvent(type, bubbles = false, cancelable = false, data = null, origin = "", lastEventId = "", source = null, ports = []) {
+ webidl.brandCheck(this, MessageEvent);
+ webidl.argumentLengthCheck(arguments, 1, { header: "MessageEvent.initMessageEvent" });
+ return new MessageEvent(type, {
+ bubbles,
+ cancelable,
+ data,
+ origin,
+ lastEventId,
+ source,
+ ports
+ });
+ }
+ };
+ var CloseEvent = class extends Event {
+ #eventInit;
+ constructor(type, eventInitDict = {}) {
+ webidl.argumentLengthCheck(arguments, 1, { header: "CloseEvent constructor" });
+ type = webidl.converters.DOMString(type);
+ eventInitDict = webidl.converters.CloseEventInit(eventInitDict);
+ super(type, eventInitDict);
+ this.#eventInit = eventInitDict;
+ }
+ get wasClean() {
+ webidl.brandCheck(this, CloseEvent);
+ return this.#eventInit.wasClean;
+ }
+ get code() {
+ webidl.brandCheck(this, CloseEvent);
+ return this.#eventInit.code;
+ }
+ get reason() {
+ webidl.brandCheck(this, CloseEvent);
+ return this.#eventInit.reason;
+ }
+ };
+ var ErrorEvent = class extends Event {
+ #eventInit;
+ constructor(type, eventInitDict) {
+ webidl.argumentLengthCheck(arguments, 1, { header: "ErrorEvent constructor" });
+ super(type, eventInitDict);
+ type = webidl.converters.DOMString(type);
+ eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {});
+ this.#eventInit = eventInitDict;
+ }
+ get message() {
+ webidl.brandCheck(this, ErrorEvent);
+ return this.#eventInit.message;
+ }
+ get filename() {
+ webidl.brandCheck(this, ErrorEvent);
+ return this.#eventInit.filename;
+ }
+ get lineno() {
+ webidl.brandCheck(this, ErrorEvent);
+ return this.#eventInit.lineno;
+ }
+ get colno() {
+ webidl.brandCheck(this, ErrorEvent);
+ return this.#eventInit.colno;
+ }
+ get error() {
+ webidl.brandCheck(this, ErrorEvent);
+ return this.#eventInit.error;
+ }
+ };
+ Object.defineProperties(MessageEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: "MessageEvent",
+ configurable: true
+ },
+ data: kEnumerableProperty,
+ origin: kEnumerableProperty,
+ lastEventId: kEnumerableProperty,
+ source: kEnumerableProperty,
+ ports: kEnumerableProperty,
+ initMessageEvent: kEnumerableProperty
+ });
+ Object.defineProperties(CloseEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: "CloseEvent",
+ configurable: true
+ },
+ reason: kEnumerableProperty,
+ code: kEnumerableProperty,
+ wasClean: kEnumerableProperty
+ });
+ Object.defineProperties(ErrorEvent.prototype, {
+ [Symbol.toStringTag]: {
+ value: "ErrorEvent",
+ configurable: true
+ },
+ message: kEnumerableProperty,
+ filename: kEnumerableProperty,
+ lineno: kEnumerableProperty,
+ colno: kEnumerableProperty,
+ error: kEnumerableProperty
+ });
+ webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort);
+ webidl.converters["sequence"] = webidl.sequenceConverter(webidl.converters.MessagePort);
+ var eventInit = [
+ {
+ key: "bubbles",
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: "cancelable",
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: "composed",
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ }
+ ];
+ webidl.converters.MessageEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: "data",
+ converter: webidl.converters.any,
+ defaultValue: null
+ },
+ {
+ key: "origin",
+ converter: webidl.converters.USVString,
+ defaultValue: ""
+ },
+ {
+ key: "lastEventId",
+ converter: webidl.converters.DOMString,
+ defaultValue: ""
+ },
+ {
+ key: "source",
+ converter: webidl.nullableConverter(webidl.converters.MessagePort),
+ defaultValue: null
+ },
+ {
+ key: "ports",
+ converter: webidl.converters["sequence"],
+ get defaultValue() {
+ return [];
+ }
+ }
+ ]);
+ webidl.converters.CloseEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: "wasClean",
+ converter: webidl.converters.boolean,
+ defaultValue: false
+ },
+ {
+ key: "code",
+ converter: webidl.converters["unsigned short"],
+ defaultValue: 0
+ },
+ {
+ key: "reason",
+ converter: webidl.converters.USVString,
+ defaultValue: ""
+ }
+ ]);
+ webidl.converters.ErrorEventInit = webidl.dictionaryConverter([
+ ...eventInit,
+ {
+ key: "message",
+ converter: webidl.converters.DOMString,
+ defaultValue: ""
+ },
+ {
+ key: "filename",
+ converter: webidl.converters.USVString,
+ defaultValue: ""
+ },
+ {
+ key: "lineno",
+ converter: webidl.converters["unsigned long"],
+ defaultValue: 0
+ },
+ {
+ key: "colno",
+ converter: webidl.converters["unsigned long"],
+ defaultValue: 0
+ },
+ {
+ key: "error",
+ converter: webidl.converters.any
+ }
+ ]);
+ module2.exports = {
+ MessageEvent,
+ CloseEvent,
+ ErrorEvent
+ };
+ }
+});
+
+// lib/websocket/util.js
+var require_util3 = __commonJS({
+ "lib/websocket/util.js"(exports2, module2) {
+ "use strict";
+ var { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = require_symbols3();
+ var { states, opcodes } = require_constants3();
+ var { MessageEvent, ErrorEvent } = require_events();
+ function isEstablished(ws) {
+ return ws[kReadyState] === states.OPEN;
+ }
+ function isClosing(ws) {
+ return ws[kReadyState] === states.CLOSING;
+ }
+ function isClosed(ws) {
+ return ws[kReadyState] === states.CLOSED;
+ }
+ function fireEvent(e, target, eventConstructor = Event, eventInitDict) {
+ const event = new eventConstructor(e, eventInitDict);
+ target.dispatchEvent(event);
+ }
+ function websocketMessageReceived(ws, type, data) {
+ if (ws[kReadyState] !== states.OPEN) {
+ return;
+ }
+ let dataForEvent;
+ if (type === opcodes.TEXT) {
+ try {
+ dataForEvent = new TextDecoder("utf-8", { fatal: true }).decode(data);
+ } catch {
+ failWebsocketConnection(ws, "Received invalid UTF-8 in text frame.");
+ return;
+ }
+ } else if (type === opcodes.BINARY) {
+ if (ws[kBinaryType] === "blob") {
+ dataForEvent = new Blob([data]);
+ } else {
+ dataForEvent = new Uint8Array(data).buffer;
+ }
+ }
+ fireEvent("message", ws, MessageEvent, {
+ origin: ws[kWebSocketURL].origin,
+ data: dataForEvent
+ });
+ }
+ function isValidSubprotocol(protocol) {
+ if (protocol.length === 0) {
+ return false;
+ }
+ for (const char of protocol) {
+ const code = char.charCodeAt(0);
+ if (code < 33 || code > 126 || char === "(" || char === ")" || char === "<" || char === ">" || char === "@" || char === "," || char === ";" || char === ":" || char === "\\" || char === '"' || char === "/" || char === "[" || char === "]" || char === "?" || char === "=" || char === "{" || char === "}" || code === 32 || code === 9) {
+ return false;
+ }
+ }
+ return true;
+ }
+ function isValidStatusCode(code) {
+ if (code >= 1e3 && code < 1015) {
+ return code !== 1004 && code !== 1005 && code !== 1006;
+ }
+ return code >= 3e3 && code <= 4999;
+ }
+ function failWebsocketConnection(ws, reason) {
+ const { [kController]: controller, [kResponse]: response } = ws;
+ controller.abort();
+ if (response?.socket && !response.socket.destroyed) {
+ response.socket.destroy();
+ }
+ if (reason) {
+ fireEvent("error", ws, ErrorEvent, {
+ error: new Error(reason)
+ });
+ }
+ }
+ module2.exports = {
+ isEstablished,
+ isClosing,
+ isClosed,
+ fireEvent,
+ isValidSubprotocol,
+ isValidStatusCode,
+ failWebsocketConnection,
+ websocketMessageReceived
+ };
+ }
+});
+
+// lib/websocket/connection.js
+var require_connection = __commonJS({
+ "lib/websocket/connection.js"(exports2, module2) {
+ "use strict";
+ var diagnosticsChannel = require("diagnostics_channel");
+ var { uid, states } = require_constants3();
+ var {
+ kReadyState,
+ kSentClose,
+ kByteParser,
+ kReceivedClose
+ } = require_symbols3();
+ var { fireEvent, failWebsocketConnection } = require_util3();
+ var { CloseEvent } = require_events();
+ var { makeRequest } = require_request();
+ var { fetching } = require_fetch();
+ var { Headers } = require_headers();
+ var { getGlobalDispatcher } = require_global2();
+ var { kHeadersList } = require_symbols();
+ var channels = {};
+ channels.open = diagnosticsChannel.channel("undici:websocket:open");
+ channels.close = diagnosticsChannel.channel("undici:websocket:close");
+ channels.socketError = diagnosticsChannel.channel("undici:websocket:socket_error");
+ var crypto;
+ try {
+ crypto = require("crypto");
+ } catch {
+ }
+ function establishWebSocketConnection(url, protocols, ws, onEstablish, options) {
+ const requestURL = url;
+ requestURL.protocol = url.protocol === "ws:" ? "http:" : "https:";
+ const request = makeRequest({
+ urlList: [requestURL],
+ serviceWorkers: "none",
+ referrer: "no-referrer",
+ mode: "websocket",
+ credentials: "include",
+ cache: "no-store",
+ redirect: "error"
+ });
+ if (options.headers) {
+ const headersList = new Headers(options.headers)[kHeadersList];
+ request.headersList = headersList;
+ }
+ const keyValue = crypto.randomBytes(16).toString("base64");
+ request.headersList.append("sec-websocket-key", keyValue);
+ request.headersList.append("sec-websocket-version", "13");
+ for (const protocol of protocols) {
+ request.headersList.append("sec-websocket-protocol", protocol);
+ }
+ const permessageDeflate = "";
+ const controller = fetching({
+ request,
+ useParallelQueue: true,
+ dispatcher: options.dispatcher ?? getGlobalDispatcher(),
+ processResponse(response) {
+ if (response.type === "error" || response.status !== 101) {
+ failWebsocketConnection(ws, "Received network error or non-101 status code.");
+ return;
+ }
+ if (protocols.length !== 0 && !response.headersList.get("Sec-WebSocket-Protocol")) {
+ failWebsocketConnection(ws, "Server did not respond with sent protocols.");
+ return;
+ }
+ if (response.headersList.get("Upgrade")?.toLowerCase() !== "websocket") {
+ failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".');
+ return;
+ }
+ if (response.headersList.get("Connection")?.toLowerCase() !== "upgrade") {
+ failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".');
+ return;
+ }
+ const secWSAccept = response.headersList.get("Sec-WebSocket-Accept");
+ const digest = crypto.createHash("sha1").update(keyValue + uid).digest("base64");
+ if (secWSAccept !== digest) {
+ failWebsocketConnection(ws, "Incorrect hash received in Sec-WebSocket-Accept header.");
+ return;
+ }
+ const secExtension = response.headersList.get("Sec-WebSocket-Extensions");
+ if (secExtension !== null && secExtension !== permessageDeflate) {
+ failWebsocketConnection(ws, "Received different permessage-deflate than the one set.");
+ return;
+ }
+ const secProtocol = response.headersList.get("Sec-WebSocket-Protocol");
+ if (secProtocol !== null && secProtocol !== request.headersList.get("Sec-WebSocket-Protocol")) {
+ failWebsocketConnection(ws, "Protocol was not set in the opening handshake.");
+ return;
+ }
+ response.socket.on("data", onSocketData);
+ response.socket.on("close", onSocketClose);
+ response.socket.on("error", onSocketError);
+ if (channels.open.hasSubscribers) {
+ channels.open.publish({
+ address: response.socket.address(),
+ protocol: secProtocol,
+ extensions: secExtension
+ });
+ }
+ onEstablish(response);
+ }
+ });
+ return controller;
+ }
+ function onSocketData(chunk) {
+ if (!this.ws[kByteParser].write(chunk)) {
+ this.pause();
+ }
+ }
+ function onSocketClose() {
+ const { ws } = this;
+ const wasClean = ws[kSentClose] && ws[kReceivedClose];
+ let code = 1005;
+ let reason = "";
+ const result = ws[kByteParser].closingInfo;
+ if (result) {
+ code = result.code ?? 1005;
+ reason = result.reason;
+ } else if (!ws[kSentClose]) {
+ code = 1006;
+ }
+ ws[kReadyState] = states.CLOSED;
+ fireEvent("close", ws, CloseEvent, {
+ wasClean,
+ code,
+ reason
+ });
+ if (channels.close.hasSubscribers) {
+ channels.close.publish({
+ websocket: ws,
+ code,
+ reason
+ });
+ }
+ }
+ function onSocketError(error) {
+ const { ws } = this;
+ ws[kReadyState] = states.CLOSING;
+ if (channels.socketError.hasSubscribers) {
+ channels.socketError.publish(error);
+ }
+ this.destroy();
+ }
+ module2.exports = {
+ establishWebSocketConnection
+ };
+ }
+});
+
+// lib/websocket/frame.js
+var require_frame = __commonJS({
+ "lib/websocket/frame.js"(exports2, module2) {
+ "use strict";
+ var { maxUnsigned16Bit } = require_constants3();
+ var crypto;
+ try {
+ crypto = require("crypto");
+ } catch {
+ }
+ var WebsocketFrameSend = class {
+ constructor(data) {
+ this.frameData = data;
+ this.maskKey = crypto.randomBytes(4);
+ }
+ createFrame(opcode) {
+ const bodyLength = this.frameData?.byteLength ?? 0;
+ let payloadLength = bodyLength;
+ let offset = 6;
+ if (bodyLength > maxUnsigned16Bit) {
+ offset += 8;
+ payloadLength = 127;
+ } else if (bodyLength > 125) {
+ offset += 2;
+ payloadLength = 126;
+ }
+ const buffer = Buffer.allocUnsafe(bodyLength + offset);
+ buffer[0] = buffer[1] = 0;
+ buffer[0] |= 128;
+ buffer[0] = (buffer[0] & 240) + opcode;
+ buffer[offset - 4] = this.maskKey[0];
+ buffer[offset - 3] = this.maskKey[1];
+ buffer[offset - 2] = this.maskKey[2];
+ buffer[offset - 1] = this.maskKey[3];
+ buffer[1] = payloadLength;
+ if (payloadLength === 126) {
+ buffer.writeUInt16BE(bodyLength, 2);
+ } else if (payloadLength === 127) {
+ buffer[2] = buffer[3] = 0;
+ buffer.writeUIntBE(bodyLength, 4, 6);
+ }
+ buffer[1] |= 128;
+ for (let i = 0; i < bodyLength; i++) {
+ buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4];
+ }
+ return buffer;
+ }
+ };
+ module2.exports = {
+ WebsocketFrameSend
+ };
+ }
+});
+
+// lib/websocket/receiver.js
+var require_receiver = __commonJS({
+ "lib/websocket/receiver.js"(exports2, module2) {
+ "use strict";
+ var { Writable } = require("stream");
+ var diagnosticsChannel = require("diagnostics_channel");
+ var { parserStates, opcodes, states, emptyBuffer } = require_constants3();
+ var { kReadyState, kSentClose, kResponse, kReceivedClose } = require_symbols3();
+ var { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = require_util3();
+ var { WebsocketFrameSend } = require_frame();
+ var channels = {};
+ channels.ping = diagnosticsChannel.channel("undici:websocket:ping");
+ channels.pong = diagnosticsChannel.channel("undici:websocket:pong");
+ var ByteParser = class extends Writable {
+ #buffers = [];
+ #byteOffset = 0;
+ #state = parserStates.INFO;
+ #info = {};
+ #fragments = [];
+ constructor(ws) {
+ super();
+ this.ws = ws;
+ }
+ _write(chunk, _, callback) {
+ this.#buffers.push(chunk);
+ this.#byteOffset += chunk.length;
+ this.run(callback);
+ }
+ run(callback) {
+ while (true) {
+ if (this.#state === parserStates.INFO) {
+ if (this.#byteOffset < 2) {
+ return callback();
+ }
+ const buffer = this.consume(2);
+ this.#info.fin = (buffer[0] & 128) !== 0;
+ this.#info.opcode = buffer[0] & 15;
+ this.#info.originalOpcode ??= this.#info.opcode;
+ this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION;
+ if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) {
+ failWebsocketConnection(this.ws, "Invalid frame type was fragmented.");
+ return;
+ }
+ const payloadLength = buffer[1] & 127;
+ if (payloadLength <= 125) {
+ this.#info.payloadLength = payloadLength;
+ this.#state = parserStates.READ_DATA;
+ } else if (payloadLength === 126) {
+ this.#state = parserStates.PAYLOADLENGTH_16;
+ } else if (payloadLength === 127) {
+ this.#state = parserStates.PAYLOADLENGTH_64;
+ }
+ if (this.#info.fragmented && payloadLength > 125) {
+ failWebsocketConnection(this.ws, "Fragmented frame exceeded 125 bytes.");
+ return;
+ } else if ((this.#info.opcode === opcodes.PING || this.#info.opcode === opcodes.PONG || this.#info.opcode === opcodes.CLOSE) && payloadLength > 125) {
+ failWebsocketConnection(this.ws, "Payload length for control frame exceeded 125 bytes.");
+ return;
+ } else if (this.#info.opcode === opcodes.CLOSE) {
+ if (payloadLength === 1) {
+ failWebsocketConnection(this.ws, "Received close frame with a 1-byte body.");
+ return;
+ }
+ const body = this.consume(payloadLength);
+ this.#info.closeInfo = this.parseCloseBody(false, body);
+ if (!this.ws[kSentClose]) {
+ const body2 = Buffer.allocUnsafe(2);
+ body2.writeUInt16BE(this.#info.closeInfo.code, 0);
+ const closeFrame = new WebsocketFrameSend(body2);
+ this.ws[kResponse].socket.write(closeFrame.createFrame(opcodes.CLOSE), (err) => {
+ if (!err) {
+ this.ws[kSentClose] = true;
+ }
+ });
+ }
+ this.ws[kReadyState] = states.CLOSING;
+ this.ws[kReceivedClose] = true;
+ this.end();
+ return;
+ } else if (this.#info.opcode === opcodes.PING) {
+ const body = this.consume(payloadLength);
+ if (!this.ws[kReceivedClose]) {
+ const frame = new WebsocketFrameSend(body);
+ this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG));
+ if (channels.ping.hasSubscribers) {
+ channels.ping.publish({
+ payload: body
+ });
+ }
+ }
+ this.#state = parserStates.INFO;
+ if (this.#byteOffset > 0) {
+ continue;
+ } else {
+ callback();
+ return;
+ }
+ } else if (this.#info.opcode === opcodes.PONG) {
+ const body = this.consume(payloadLength);
+ if (channels.pong.hasSubscribers) {
+ channels.pong.publish({
+ payload: body
+ });
+ }
+ if (this.#byteOffset > 0) {
+ continue;
+ } else {
+ callback();
+ return;
+ }
+ }
+ } else if (this.#state === parserStates.PAYLOADLENGTH_16) {
+ if (this.#byteOffset < 2) {
+ return callback();
+ }
+ const buffer = this.consume(2);
+ this.#info.payloadLength = buffer.readUInt16BE(0);
+ this.#state = parserStates.READ_DATA;
+ } else if (this.#state === parserStates.PAYLOADLENGTH_64) {
+ if (this.#byteOffset < 8) {
+ return callback();
+ }
+ const buffer = this.consume(8);
+ const upper = buffer.readUInt32BE(0);
+ if (upper > 2 ** 31 - 1) {
+ failWebsocketConnection(this.ws, "Received payload length > 2^31 bytes.");
+ return;
+ }
+ const lower = buffer.readUInt32BE(4);
+ this.#info.payloadLength = (upper << 8) + lower;
+ this.#state = parserStates.READ_DATA;
+ } else if (this.#state === parserStates.READ_DATA) {
+ if (this.#byteOffset < this.#info.payloadLength) {
+ return callback();
+ } else if (this.#byteOffset >= this.#info.payloadLength) {
+ const body = this.consume(this.#info.payloadLength);
+ this.#fragments.push(body);
+ if (!this.#info.fragmented || this.#info.fin && this.#info.opcode === opcodes.CONTINUATION) {
+ const fullMessage = Buffer.concat(this.#fragments);
+ websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage);
+ this.#info = {};
+ this.#fragments.length = 0;
+ }
+ this.#state = parserStates.INFO;
+ }
+ }
+ if (this.#byteOffset > 0) {
+ continue;
+ } else {
+ callback();
+ break;
+ }
+ }
+ }
+ consume(n) {
+ if (n > this.#byteOffset) {
+ return null;
+ } else if (n === 0) {
+ return emptyBuffer;
+ }
+ if (this.#buffers[0].length === n) {
+ this.#byteOffset -= this.#buffers[0].length;
+ return this.#buffers.shift();
+ }
+ const buffer = Buffer.allocUnsafe(n);
+ let offset = 0;
+ while (offset !== n) {
+ const next = this.#buffers[0];
+ const { length } = next;
+ if (length + offset === n) {
+ buffer.set(this.#buffers.shift(), offset);
+ break;
+ } else if (length + offset > n) {
+ buffer.set(next.subarray(0, n - offset), offset);
+ this.#buffers[0] = next.subarray(n - offset);
+ break;
+ } else {
+ buffer.set(this.#buffers.shift(), offset);
+ offset += next.length;
+ }
+ }
+ this.#byteOffset -= n;
+ return buffer;
+ }
+ parseCloseBody(onlyCode, data) {
+ let code;
+ if (data.length >= 2) {
+ code = data.readUInt16BE(0);
+ }
+ if (onlyCode) {
+ if (!isValidStatusCode(code)) {
+ return null;
+ }
+ return { code };
+ }
+ let reason = data.subarray(2);
+ if (reason[0] === 239 && reason[1] === 187 && reason[2] === 191) {
+ reason = reason.subarray(3);
+ }
+ if (code !== void 0 && !isValidStatusCode(code)) {
+ return null;
+ }
+ try {
+ reason = new TextDecoder("utf-8", { fatal: true }).decode(reason);
+ } catch {
+ return null;
+ }
+ return { code, reason };
+ }
+ get closingInfo() {
+ return this.#info.closeInfo;
+ }
+ };
+ module2.exports = {
+ ByteParser
+ };
+ }
+});
+
+// lib/websocket/websocket.js
+var require_websocket = __commonJS({
+ "lib/websocket/websocket.js"(exports2, module2) {
+ "use strict";
+ var { webidl } = require_webidl();
+ var { DOMException } = require_constants();
+ var { URLSerializer } = require_dataURL();
+ var { getGlobalOrigin } = require_global();
+ var { staticPropertyDescriptors, states, opcodes, emptyBuffer } = require_constants3();
+ var {
+ kWebSocketURL,
+ kReadyState,
+ kController,
+ kBinaryType,
+ kResponse,
+ kSentClose,
+ kByteParser
+ } = require_symbols3();
+ var { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = require_util3();
+ var { establishWebSocketConnection } = require_connection();
+ var { WebsocketFrameSend } = require_frame();
+ var { ByteParser } = require_receiver();
+ var { kEnumerableProperty, isBlobLike } = require_util();
+ var { getGlobalDispatcher } = require_global2();
+ var { types } = require("util");
+ var experimentalWarned = false;
+ var WebSocket = class extends EventTarget {
+ #events = {
+ open: null,
+ error: null,
+ close: null,
+ message: null
+ };
+ #bufferedAmount = 0;
+ #protocol = "";
+ #extensions = "";
+ constructor(url, protocols = []) {
+ super();
+ webidl.argumentLengthCheck(arguments, 1, { header: "WebSocket constructor" });
+ if (!experimentalWarned) {
+ experimentalWarned = true;
+ process.emitWarning("WebSockets are experimental, expect them to change at any time.", {
+ code: "UNDICI-WS"
+ });
+ }
+ const options = webidl.converters["DOMString or sequence or WebSocketInit"](protocols);
+ url = webidl.converters.USVString(url);
+ protocols = options.protocols;
+ const baseURL = getGlobalOrigin();
+ let urlRecord;
+ try {
+ urlRecord = new URL(url, baseURL);
+ } catch (e) {
+ throw new DOMException(e, "SyntaxError");
+ }
+ if (urlRecord.protocol === "http:") {
+ urlRecord.protocol = "ws:";
+ } else if (urlRecord.protocol === "https:") {
+ urlRecord.protocol = "wss:";
+ }
+ if (urlRecord.protocol !== "ws:" && urlRecord.protocol !== "wss:") {
+ throw new DOMException(`Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, "SyntaxError");
+ }
+ if (urlRecord.hash || urlRecord.href.endsWith("#")) {
+ throw new DOMException("Got fragment", "SyntaxError");
+ }
+ if (typeof protocols === "string") {
+ protocols = [protocols];
+ }
+ if (protocols.length !== new Set(protocols.map((p) => p.toLowerCase())).size) {
+ throw new DOMException("Invalid Sec-WebSocket-Protocol value", "SyntaxError");
+ }
+ if (protocols.length > 0 && !protocols.every((p) => isValidSubprotocol(p))) {
+ throw new DOMException("Invalid Sec-WebSocket-Protocol value", "SyntaxError");
+ }
+ this[kWebSocketURL] = new URL(urlRecord.href);
+ this[kController] = establishWebSocketConnection(urlRecord, protocols, this, (response) => this.#onConnectionEstablished(response), options);
+ this[kReadyState] = WebSocket.CONNECTING;
+ this[kBinaryType] = "blob";
+ }
+ close(code = void 0, reason = void 0) {
+ webidl.brandCheck(this, WebSocket);
+ if (code !== void 0) {
+ code = webidl.converters["unsigned short"](code, { clamp: true });
+ }
+ if (reason !== void 0) {
+ reason = webidl.converters.USVString(reason);
+ }
+ if (code !== void 0) {
+ if (code !== 1e3 && (code < 3e3 || code > 4999)) {
+ throw new DOMException("invalid code", "InvalidAccessError");
+ }
+ }
+ let reasonByteLength = 0;
+ if (reason !== void 0) {
+ reasonByteLength = Buffer.byteLength(reason);
+ if (reasonByteLength > 123) {
+ throw new DOMException(`Reason must be less than 123 bytes; received ${reasonByteLength}`, "SyntaxError");
+ }
+ }
+ if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) {
+ } else if (!isEstablished(this)) {
+ failWebsocketConnection(this, "Connection was closed before it was established.");
+ this[kReadyState] = WebSocket.CLOSING;
+ } else if (!isClosing(this)) {
+ const frame = new WebsocketFrameSend();
+ if (code !== void 0 && reason === void 0) {
+ frame.frameData = Buffer.allocUnsafe(2);
+ frame.frameData.writeUInt16BE(code, 0);
+ } else if (code !== void 0 && reason !== void 0) {
+ frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength);
+ frame.frameData.writeUInt16BE(code, 0);
+ frame.frameData.write(reason, 2, "utf-8");
+ } else {
+ frame.frameData = emptyBuffer;
+ }
+ const socket = this[kResponse].socket;
+ socket.write(frame.createFrame(opcodes.CLOSE), (err) => {
+ if (!err) {
+ this[kSentClose] = true;
+ }
+ });
+ this[kReadyState] = states.CLOSING;
+ } else {
+ this[kReadyState] = WebSocket.CLOSING;
+ }
+ }
+ send(data) {
+ webidl.brandCheck(this, WebSocket);
+ webidl.argumentLengthCheck(arguments, 1, { header: "WebSocket.send" });
+ data = webidl.converters.WebSocketSendData(data);
+ if (this[kReadyState] === WebSocket.CONNECTING) {
+ throw new DOMException("Sent before connected.", "InvalidStateError");
+ }
+ if (!isEstablished(this) || isClosing(this)) {
+ return;
+ }
+ const socket = this[kResponse].socket;
+ if (typeof data === "string") {
+ const value = Buffer.from(data);
+ const frame = new WebsocketFrameSend(value);
+ const buffer = frame.createFrame(opcodes.TEXT);
+ this.#bufferedAmount += value.byteLength;
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength;
+ });
+ } else if (types.isArrayBuffer(data)) {
+ const value = Buffer.from(data);
+ const frame = new WebsocketFrameSend(value);
+ const buffer = frame.createFrame(opcodes.BINARY);
+ this.#bufferedAmount += value.byteLength;
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength;
+ });
+ } else if (ArrayBuffer.isView(data)) {
+ const ab = Buffer.from(data, data.byteOffset, data.byteLength);
+ const frame = new WebsocketFrameSend(ab);
+ const buffer = frame.createFrame(opcodes.BINARY);
+ this.#bufferedAmount += ab.byteLength;
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= ab.byteLength;
+ });
+ } else if (isBlobLike(data)) {
+ const frame = new WebsocketFrameSend();
+ data.arrayBuffer().then((ab) => {
+ const value = Buffer.from(ab);
+ frame.frameData = value;
+ const buffer = frame.createFrame(opcodes.BINARY);
+ this.#bufferedAmount += value.byteLength;
+ socket.write(buffer, () => {
+ this.#bufferedAmount -= value.byteLength;
+ });
+ });
+ }
+ }
+ get readyState() {
+ webidl.brandCheck(this, WebSocket);
+ return this[kReadyState];
+ }
+ get bufferedAmount() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#bufferedAmount;
+ }
+ get url() {
+ webidl.brandCheck(this, WebSocket);
+ return URLSerializer(this[kWebSocketURL]);
+ }
+ get extensions() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#extensions;
+ }
+ get protocol() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#protocol;
+ }
+ get onopen() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#events.open;
+ }
+ set onopen(fn) {
+ webidl.brandCheck(this, WebSocket);
+ if (this.#events.open) {
+ this.removeEventListener("open", this.#events.open);
+ }
+ if (typeof fn === "function") {
+ this.#events.open = fn;
+ this.addEventListener("open", fn);
+ } else {
+ this.#events.open = null;
+ }
+ }
+ get onerror() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#events.error;
+ }
+ set onerror(fn) {
+ webidl.brandCheck(this, WebSocket);
+ if (this.#events.error) {
+ this.removeEventListener("error", this.#events.error);
+ }
+ if (typeof fn === "function") {
+ this.#events.error = fn;
+ this.addEventListener("error", fn);
+ } else {
+ this.#events.error = null;
+ }
+ }
+ get onclose() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#events.close;
+ }
+ set onclose(fn) {
+ webidl.brandCheck(this, WebSocket);
+ if (this.#events.close) {
+ this.removeEventListener("close", this.#events.close);
+ }
+ if (typeof fn === "function") {
+ this.#events.close = fn;
+ this.addEventListener("close", fn);
+ } else {
+ this.#events.close = null;
+ }
+ }
+ get onmessage() {
+ webidl.brandCheck(this, WebSocket);
+ return this.#events.message;
+ }
+ set onmessage(fn) {
+ webidl.brandCheck(this, WebSocket);
+ if (this.#events.message) {
+ this.removeEventListener("message", this.#events.message);
+ }
+ if (typeof fn === "function") {
+ this.#events.message = fn;
+ this.addEventListener("message", fn);
+ } else {
+ this.#events.message = null;
+ }
+ }
+ get binaryType() {
+ webidl.brandCheck(this, WebSocket);
+ return this[kBinaryType];
+ }
+ set binaryType(type) {
+ webidl.brandCheck(this, WebSocket);
+ if (type !== "blob" && type !== "arraybuffer") {
+ this[kBinaryType] = "blob";
+ } else {
+ this[kBinaryType] = type;
+ }
+ }
+ #onConnectionEstablished(response) {
+ this[kResponse] = response;
+ const parser = new ByteParser(this);
+ parser.on("drain", function onParserDrain() {
+ this.ws[kResponse].socket.resume();
+ });
+ response.socket.ws = this;
+ this[kByteParser] = parser;
+ this[kReadyState] = states.OPEN;
+ const extensions = response.headersList.get("sec-websocket-extensions");
+ if (extensions !== null) {
+ this.#extensions = extensions;
+ }
+ const protocol = response.headersList.get("sec-websocket-protocol");
+ if (protocol !== null) {
+ this.#protocol = protocol;
+ }
+ fireEvent("open", this);
+ }
+ };
+ WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING;
+ WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN;
+ WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING;
+ WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED;
+ Object.defineProperties(WebSocket.prototype, {
+ CONNECTING: staticPropertyDescriptors,
+ OPEN: staticPropertyDescriptors,
+ CLOSING: staticPropertyDescriptors,
+ CLOSED: staticPropertyDescriptors,
+ url: kEnumerableProperty,
+ readyState: kEnumerableProperty,
+ bufferedAmount: kEnumerableProperty,
+ onopen: kEnumerableProperty,
+ onerror: kEnumerableProperty,
+ onclose: kEnumerableProperty,
+ close: kEnumerableProperty,
+ onmessage: kEnumerableProperty,
+ binaryType: kEnumerableProperty,
+ send: kEnumerableProperty,
+ extensions: kEnumerableProperty,
+ protocol: kEnumerableProperty,
+ [Symbol.toStringTag]: {
+ value: "WebSocket",
+ writable: false,
+ enumerable: false,
+ configurable: true
+ }
+ });
+ Object.defineProperties(WebSocket, {
+ CONNECTING: staticPropertyDescriptors,
+ OPEN: staticPropertyDescriptors,
+ CLOSING: staticPropertyDescriptors,
+ CLOSED: staticPropertyDescriptors
+ });
+ webidl.converters["sequence"] = webidl.sequenceConverter(webidl.converters.DOMString);
+ webidl.converters["DOMString or sequence"] = function(V) {
+ if (webidl.util.Type(V) === "Object" && Symbol.iterator in V) {
+ return webidl.converters["sequence"](V);
+ }
+ return webidl.converters.DOMString(V);
+ };
+ webidl.converters.WebSocketInit = webidl.dictionaryConverter([
+ {
+ key: "protocols",
+ converter: webidl.converters["DOMString or sequence"],
+ get defaultValue() {
+ return [];
+ }
+ },
+ {
+ key: "dispatcher",
+ converter: (V) => V,
+ get defaultValue() {
+ return getGlobalDispatcher();
+ }
+ },
+ {
+ key: "headers",
+ converter: webidl.nullableConverter(webidl.converters.HeadersInit)
+ }
+ ]);
+ webidl.converters["DOMString or sequence or WebSocketInit"] = function(V) {
+ if (webidl.util.Type(V) === "Object" && !(Symbol.iterator in V)) {
+ return webidl.converters.WebSocketInit(V);
+ }
+ return { protocols: webidl.converters["DOMString or sequence"](V) };
+ };
+ webidl.converters.WebSocketSendData = function(V) {
+ if (webidl.util.Type(V) === "Object") {
+ if (isBlobLike(V)) {
+ return webidl.converters.Blob(V, { strict: false });
+ }
+ if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) {
+ return webidl.converters.BufferSource(V);
+ }
+ }
+ return webidl.converters.USVString(V);
+ };
+ module2.exports = {
+ WebSocket
+ };
+ }
+});
+
// index-fetch.js
var fetchImpl = require_fetch().fetch;
-module.exports.fetch = async function fetch(resource) {
+module.exports.fetch = async function fetch(resource, init = void 0) {
try {
- return await fetchImpl(...arguments);
+ return await fetchImpl(resource, init);
} catch (err) {
Error.captureStackTrace(err, this);
throw err;
@@ -11605,4 +13225,6 @@ module.exports.FormData = require_formdata().FormData;
module.exports.Headers = require_headers().Headers;
module.exports.Response = require_response().Response;
module.exports.Request = require_request().Request;
+module.exports.WebSocket = require_websocket().WebSocket;
/*! formdata-polyfill. MIT License. Jimmy Wärting */
+/*! ws. MIT License. Einar Otto Stangvik */
diff --git a/deps/uv/uv.gyp b/deps/uv/uv.gyp
index 410323d7c69276..6c86c3fa50496d 100644
--- a/deps/uv/uv.gyp
+++ b/deps/uv/uv.gyp
@@ -136,7 +136,6 @@
'uv_sources_android': [
'src/unix/linux.c',
'src/unix/procfs-exepath.c',
- 'src/unix/pthread-fixes.c',
'src/unix/random-getentropy.c',
'src/unix/random-getrandom.c',
'src/unix/random-sysctl-linux.c',
@@ -398,7 +397,6 @@
}],
['OS=="zos"', {
'sources': [
- 'src/unix/pthread-fixes.c',
'src/unix/os390.c',
'src/unix/os390-syscalls.c'
]
diff --git a/deps/v8/src/flags/flags.cc b/deps/v8/src/flags/flags.cc
index e41b71f85ec657..78d7b82354cb04 100644
--- a/deps/v8/src/flags/flags.cc
+++ b/deps/v8/src/flags/flags.cc
@@ -10,6 +10,7 @@
#include
#include
#include
+#include
#include
#include "src/base/functional.h"
@@ -103,7 +104,12 @@ struct Flag {
const char* cmt_; // A comment about the flags purpose.
bool owns_ptr_; // Does the flag own its string value?
SetBy set_by_ = SetBy::kDefault;
+ // Name of the flag implying this flag, if any.
const char* implied_by_ = nullptr;
+#ifdef DEBUG
+ // Pointer to the flag implying this flag, if any.
+ const Flag* implied_by_ptr_ = nullptr;
+#endif
FlagType type() const { return type_; }
@@ -113,6 +119,17 @@ struct Flag {
bool PointsTo(const void* ptr) const { return valptr_ == ptr; }
+#ifdef DEBUG
+ bool ImpliedBy(const void* ptr) const {
+ const Flag* current = this->implied_by_ptr_;
+ while (current != nullptr) {
+ if (current->PointsTo(ptr)) return true;
+ current = current->implied_by_ptr_;
+ }
+ return false;
+ }
+#endif
+
bool bool_variable() const { return GetValue(); }
void set_bool_variable(bool value, SetBy set_by) {
@@ -333,6 +350,15 @@ struct Flag {
if (IsAnyImplication(new_set_by)) {
DCHECK_NOT_NULL(implied_by);
implied_by_ = implied_by;
+#ifdef DEBUG
+ // This only works when implied_by is a flag_name or !flag_name, but it
+ // can also be a condition e.g. flag_name > 3. Since this is only used for
+ // checks in DEBUG mode, we will just ignore the more complex conditions
+ // for now - that will just lead to a nullptr which won't be followed.
+ implied_by_ptr_ = static_cast(
+ FindFlagByName(implied_by[0] == '!' ? implied_by + 1 : implied_by));
+ DCHECK_NE(implied_by_ptr_, this);
+#endif
}
return change_flag;
}
@@ -534,15 +560,70 @@ uint32_t ComputeFlagListHash() {
std::ostringstream modified_args_as_string;
if (COMPRESS_POINTERS_BOOL) modified_args_as_string << "ptr-compr";
if (DEBUG_BOOL) modified_args_as_string << "debug";
+
+#ifdef DEBUG
+ // These two sets are used to check that we don't leave out any flags
+ // implied by --predictable in the list below.
+ std::set flags_implied_by_predictable;
+ std::set flags_ignored_because_of_predictable;
+#endif
+
for (const Flag& flag : flags) {
if (flag.IsDefault()) continue;
+#ifdef DEBUG
+ if (flag.ImpliedBy(&v8_flags.predictable)) {
+ flags_implied_by_predictable.insert(flag.name());
+ }
+#endif
// We want to be able to flip --profile-deserialization without
// causing the code cache to get invalidated by this hash.
if (flag.PointsTo(&v8_flags.profile_deserialization)) continue;
- // Skip v8_flags.random_seed to allow predictable code caching.
+ // Skip v8_flags.random_seed and v8_flags.predictable to allow predictable
+ // code caching.
if (flag.PointsTo(&v8_flags.random_seed)) continue;
+ if (flag.PointsTo(&v8_flags.predictable)) continue;
+
+ // The following flags are implied by --predictable (some negated).
+ if (flag.PointsTo(&v8_flags.concurrent_sparkplug) ||
+ flag.PointsTo(&v8_flags.concurrent_recompilation) ||
+#ifdef V8_ENABLE_MAGLEV
+ flag.PointsTo(&v8_flags.maglev_deopt_data_on_background) ||
+ flag.PointsTo(&v8_flags.maglev_build_code_on_background) ||
+#endif
+ flag.PointsTo(&v8_flags.parallel_scavenge) ||
+ flag.PointsTo(&v8_flags.concurrent_marking) ||
+ flag.PointsTo(&v8_flags.concurrent_array_buffer_sweeping) ||
+ flag.PointsTo(&v8_flags.parallel_marking) ||
+ flag.PointsTo(&v8_flags.concurrent_sweeping) ||
+ flag.PointsTo(&v8_flags.parallel_compaction) ||
+ flag.PointsTo(&v8_flags.parallel_pointer_update) ||
+ flag.PointsTo(&v8_flags.memory_reducer) ||
+ flag.PointsTo(&v8_flags.cppheap_concurrent_marking) ||
+ flag.PointsTo(&v8_flags.cppheap_incremental_marking) ||
+ flag.PointsTo(&v8_flags.single_threaded_gc)) {
+#ifdef DEBUG
+ if (flag.ImpliedBy(&v8_flags.predictable)) {
+ flags_ignored_because_of_predictable.insert(flag.name());
+ }
+#endif
+ continue;
+ }
modified_args_as_string << flag;
}
+
+#ifdef DEBUG
+ for (const char* name : flags_implied_by_predictable) {
+ if (flags_ignored_because_of_predictable.find(name) ==
+ flags_ignored_because_of_predictable.end()) {
+ PrintF(
+ "%s should be added to the list of "
+ "flags_ignored_because_of_predictable\n",
+ name);
+ UNREACHABLE();
+ }
+ }
+#endif
+
std::string args(modified_args_as_string.str());
// Generate a hash that is not 0.
uint32_t hash = static_cast(base::hash_range(
diff --git a/deps/zlib/zlib.gyp b/deps/zlib/zlib.gyp
index 49de2a6c6de903..26ceed9fbe6825 100644
--- a/deps/zlib/zlib.gyp
+++ b/deps/zlib/zlib.gyp
@@ -107,33 +107,33 @@
}],
],
}, # zlib_arm_crc32
- {
- 'target_name': 'zlib_crc32_simd',
- 'type': 'static_library',
- 'conditions': [
- ['OS!="win" or llvm_version!="0.0"', {
- 'cflags': [
- '-msse4.2',
- '-mpclmul',
- ],
- 'xcode_settings': {
- 'OTHER_CFLAGS': [
- '-msse4.2',
- '-mpclmul',
- ],
- },
- }]
- ],
- 'defines': [ 'CRC32_SIMD_SSE42_PCLMUL' ],
- 'include_dirs': [ '<(ZLIB_ROOT)' ],
- 'direct_dependent_settings': {
- 'defines': [ 'CRC32_SIMD_SSE42_PCLMUL' ],
- 'include_dirs': [ '<(ZLIB_ROOT)' ],
- },
- 'sources': [
- '& args) {
const int argc = 1;
Local argv[argc] = { args[0] };
Local cons =
- args.Data().As |
+20.7.1
20.7.0
20.6.1
20.6.0
@@ -45,6 +46,174 @@
* [io.js](CHANGELOG_IOJS.md)
* [Archive](CHANGELOG_ARCHIVE.md)
+
+
+## 2023-09-28, Version 20.7.1 (Current), @ruyadorno
+
+### Notable Changes
+
+#### Stream performance improvements
+
+Performance improvements to writable and readable streams, improving the creation and destruction by ~15% and reducing the memory overhead each stream takes in Node.js
+
+Contributed by Raz Luvaton in [#49834](https://github.com/nodejs/node/pull/49834) and Benjamin Gruenbaum in [#49745](https://github.com/nodejs/node/pull/49745).
+
+#### Other notable changes
+
+* \[[`f4041ce1c9`](https://github.com/nodejs/node/commit/f4041ce1c9)] - **doc**: deprecate `fs.F_OK`, `fs.R_OK`, `fs.W_OK`, `fs.X_OK` (Livia Medeiros) [#49683](https://github.com/nodejs/node/pull/49683)
+* \[[`0fbbe49cf6`](https://github.com/nodejs/node/commit/0fbbe49cf6)] - **doc**: promote fetch/webstreams from experimental to stable (Steven) [#45684](https://github.com/nodejs/node/pull/45684)
+* \[[`a5dd057540`](https://github.com/nodejs/node/commit/a5dd057540)] - **doc**: deprecate `util.toUSVString` (Yagiz Nizipli) [#49725](https://github.com/nodejs/node/pull/49725)
+* \[[`7b6a73172f`](https://github.com/nodejs/node/commit/7b6a73172f)] - **doc**: deprecate calling `promisify` on a function that returns a promise (Antoine du Hamel) [#49647](https://github.com/nodejs/node/pull/49647)
+* \[[`1beefd5f16`](https://github.com/nodejs/node/commit/1beefd5f16)] - **esm**: set all hooks as release candidate (Geoffrey Booth) [#49597](https://github.com/nodejs/node/pull/49597)
+* \[[`7c5e322346`](https://github.com/nodejs/node/commit/7c5e322346)] - **stream**: improve webstream readable async iterator performance (Raz Luvaton) [#49662](https://github.com/nodejs/node/pull/49662)
+
+### Commits
+
+* \[[`4879e3fbbe`](https://github.com/nodejs/node/commit/4879e3fbbe)] - **benchmark**: add a benchmark for read() of ReadableStreams (Debadree Chatterjee) [#49622](https://github.com/nodejs/node/pull/49622)
+* \[[`78a6c73157`](https://github.com/nodejs/node/commit/78a6c73157)] - **benchmark**: shorten pipe-to by reducing number of chunks (Raz Luvaton) [#49577](https://github.com/nodejs/node/pull/49577)
+* \[[`4126a6e4c9`](https://github.com/nodejs/node/commit/4126a6e4c9)] - **benchmark**: fix webstream pipe-to (Raz Luvaton) [#49552](https://github.com/nodejs/node/pull/49552)
+* \[[`6010a91825`](https://github.com/nodejs/node/commit/6010a91825)] - **bootstrap**: do not expand argv1 for snapshots (Joyee Cheung) [#49506](https://github.com/nodejs/node/pull/49506)
+* \[[`8480280c4b`](https://github.com/nodejs/node/commit/8480280c4b)] - **bootstrap**: only use the isolate snapshot when compiling code cache (Joyee Cheung) [#49288](https://github.com/nodejs/node/pull/49288)
+* \[[`b30754aa87`](https://github.com/nodejs/node/commit/b30754aa87)] - **build**: run embedtest using node executable (Joyee Cheung) [#49506](https://github.com/nodejs/node/pull/49506)
+* \[[`31db0b8e2b`](https://github.com/nodejs/node/commit/31db0b8e2b)] - **build**: add --write-snapshot-as-array-literals to configure.py (Joyee Cheung) [#49312](https://github.com/nodejs/node/pull/49312)
+* \[[`6fcb51d3ba`](https://github.com/nodejs/node/commit/6fcb51d3ba)] - **debugger**: use `internal/url.URL` instead of `url.parse` (LiviaMedeiros) [#49590](https://github.com/nodejs/node/pull/49590)
+* \[[`aea7371506`](https://github.com/nodejs/node/commit/aea7371506)] - **deps**: V8: backport de9a5de2274f (Joyee Cheung) [#49703](https://github.com/nodejs/node/pull/49703)
+* \[[`1f16df21f8`](https://github.com/nodejs/node/commit/1f16df21f8)] - **deps**: V8: cherry-pick b33bf2dfd261 (Joyee Cheung) [#49703](https://github.com/nodejs/node/pull/49703)
+* \[[`61d18d6473`](https://github.com/nodejs/node/commit/61d18d6473)] - **deps**: update undici to 5.24.0 (Node.js GitHub Bot) [#49559](https://github.com/nodejs/node/pull/49559)
+* \[[`b8a4fef393`](https://github.com/nodejs/node/commit/b8a4fef393)] - **deps**: remove pthread-fixes.c from uv.gyp (Ben Noordhuis) [#49744](https://github.com/nodejs/node/pull/49744)
+* \[[`6c86c0683c`](https://github.com/nodejs/node/commit/6c86c0683c)] - **deps**: update googletest to d1467f5 (Node.js GitHub Bot) [#49676](https://github.com/nodejs/node/pull/49676)
+* \[[`1424404742`](https://github.com/nodejs/node/commit/1424404742)] - **deps**: update nghttp2 to 1.56.0 (Node.js GitHub Bot) [#49582](https://github.com/nodejs/node/pull/49582)
+* \[[`15b54ff95d`](https://github.com/nodejs/node/commit/15b54ff95d)] - **deps**: update googletest to 8a6feab (Node.js GitHub Bot) [#49463](https://github.com/nodejs/node/pull/49463)
+* \[[`2ceab877c2`](https://github.com/nodejs/node/commit/2ceab877c2)] - **deps**: update corepack to 0.20.0 (Node.js GitHub Bot) [#49464](https://github.com/nodejs/node/pull/49464)
+* \[[`690cca396a`](https://github.com/nodejs/node/commit/690cca396a)] - **doc**: fix `DEP0176` number (LiviaMedeiros) [#49858](https://github.com/nodejs/node/pull/49858)
+* \[[`f4041ce1c9`](https://github.com/nodejs/node/commit/f4041ce1c9)] - **doc**: deprecate `fs.F_OK`, `fs.R_OK`, `fs.W_OK`, `fs.X_OK` (Livia Medeiros) [#49683](https://github.com/nodejs/node/pull/49683)
+* \[[`6a88c6a7af`](https://github.com/nodejs/node/commit/6a88c6a7af)] - **doc**: add mertcanaltin as a triager (mert.altin) [#49826](https://github.com/nodejs/node/pull/49826)
+* \[[`0fbbe49cf6`](https://github.com/nodejs/node/commit/0fbbe49cf6)] - **doc**: promote fetch/webstreams from experimental to stable (Steven) [#45684](https://github.com/nodejs/node/pull/45684)
+* \[[`864fe56432`](https://github.com/nodejs/node/commit/864fe56432)] - **doc**: add `git node backport` way to the backporting guide (Raz Luvaton) [#49760](https://github.com/nodejs/node/pull/49760)
+* \[[`e0f93492d5`](https://github.com/nodejs/node/commit/e0f93492d5)] - **doc**: improve documentation about ICU data fallback (Joyee Cheung) [#49666](https://github.com/nodejs/node/pull/49666)
+* \[[`a5dd057540`](https://github.com/nodejs/node/commit/a5dd057540)] - **doc**: deprecate `util.toUSVString` (Yagiz Nizipli) [#49725](https://github.com/nodejs/node/pull/49725)
+* \[[`774c1cfd52`](https://github.com/nodejs/node/commit/774c1cfd52)] - **doc**: add missing function call to example for `util.promisify` (Jungku Lee) [#49719](https://github.com/nodejs/node/pull/49719)
+* \[[`fe78a34845`](https://github.com/nodejs/node/commit/fe78a34845)] - **doc**: update output of example in `mimeParams.set()` (Deokjin Kim) [#49718](https://github.com/nodejs/node/pull/49718)
+* \[[`4175ea33bd`](https://github.com/nodejs/node/commit/4175ea33bd)] - **doc**: add missed `inspect` with numericSeparator to example (Deokjin Kim) [#49717](https://github.com/nodejs/node/pull/49717)
+* \[[`3a88571972`](https://github.com/nodejs/node/commit/3a88571972)] - **doc**: fix history comments (Antoine du Hamel) [#49701](https://github.com/nodejs/node/pull/49701)
+* \[[`db4ab1ccbb`](https://github.com/nodejs/node/commit/db4ab1ccbb)] - **doc**: add missing history info for `import.meta.resolve` (Antoine du Hamel) [#49700](https://github.com/nodejs/node/pull/49700)
+* \[[`a304d1ee19`](https://github.com/nodejs/node/commit/a304d1ee19)] - **doc**: link maintaining deps to pull-request.md (Marco Ippolito) [#49716](https://github.com/nodejs/node/pull/49716)
+* \[[`35294486ad`](https://github.com/nodejs/node/commit/35294486ad)] - **doc**: fix print results in `events` (Jungku Lee) [#49548](https://github.com/nodejs/node/pull/49548)
+* \[[`9f0b0e15c9`](https://github.com/nodejs/node/commit/9f0b0e15c9)] - **doc**: alphabetize cli.md sections (Geoffrey Booth) [#49668](https://github.com/nodejs/node/pull/49668)
+* \[[`7b6a73172f`](https://github.com/nodejs/node/commit/7b6a73172f)] - **doc**: deprecate calling `promisify` on a function that returns a promise (Antoine du Hamel) [#49647](https://github.com/nodejs/node/pull/49647)
+* \[[`d316b32fff`](https://github.com/nodejs/node/commit/d316b32fff)] - **doc**: update `corepack.md` to account for 0.20.0 changes (Antoine du Hamel) [#49486](https://github.com/nodejs/node/pull/49486)
+* \[[`c2eac7dc7c`](https://github.com/nodejs/node/commit/c2eac7dc7c)] - **doc**: remove `@anonrig` from performance initiative (Yagiz Nizipli) [#49641](https://github.com/nodejs/node/pull/49641)
+* \[[`3d839fbf87`](https://github.com/nodejs/node/commit/3d839fbf87)] - **doc**: mark Node.js 16 as End-of-Life (Richard Lau) [#49651](https://github.com/nodejs/node/pull/49651)
+* \[[`53fb5aead8`](https://github.com/nodejs/node/commit/53fb5aead8)] - **doc**: save user preference for JS flavor (Vidar Eldøy) [#49526](https://github.com/nodejs/node/pull/49526)
+* \[[`e3594d5658`](https://github.com/nodejs/node/commit/e3594d5658)] - **doc**: update documentation for node:process warning (Shubham Pandey) [#49517](https://github.com/nodejs/node/pull/49517)
+* \[[`8e033c3963`](https://github.com/nodejs/node/commit/8e033c3963)] - **doc**: rename possibly confusing variable and CSS class (Antoine du Hamel) [#49536](https://github.com/nodejs/node/pull/49536)
+* \[[`d0e0eb4bb3`](https://github.com/nodejs/node/commit/d0e0eb4bb3)] - **doc**: update outdated history info (Antoine du Hamel) [#49530](https://github.com/nodejs/node/pull/49530)
+* \[[`b4724e2e3a`](https://github.com/nodejs/node/commit/b4724e2e3a)] - **doc**: close a parenthesis (Sébastien Règne) [#49525](https://github.com/nodejs/node/pull/49525)
+* \[[`0471c5798e`](https://github.com/nodejs/node/commit/0471c5798e)] - **doc**: cast GetInternalField() return type to v8::Value in addons.md (Joyee Cheung) [#49439](https://github.com/nodejs/node/pull/49439)
+* \[[`9f8bea3dda`](https://github.com/nodejs/node/commit/9f8bea3dda)] - **doc**: fix documentation for input option in child\_process (Ariel Weiss) [#49481](https://github.com/nodejs/node/pull/49481)
+* \[[`f3fea92f8a`](https://github.com/nodejs/node/commit/f3fea92f8a)] - **doc**: fix missing imports in `test.run` code examples (Oshri Asulin) [#49489](https://github.com/nodejs/node/pull/49489)
+* \[[`e426b77b67`](https://github.com/nodejs/node/commit/e426b77b67)] - **doc**: fix documentation for fs.createWriteStream highWaterMark option (Mert Can Altın) [#49456](https://github.com/nodejs/node/pull/49456)
+* \[[`2b119108ff`](https://github.com/nodejs/node/commit/2b119108ff)] - **doc**: updated releasers instructions for node.js website (Claudio W) [#49427](https://github.com/nodejs/node/pull/49427)
+* \[[`b9d4a80183`](https://github.com/nodejs/node/commit/b9d4a80183)] - **doc**: edit `import.meta.resolve` documentation (Antoine du Hamel) [#49247](https://github.com/nodejs/node/pull/49247)
+* \[[`ddf0e17a95`](https://github.com/nodejs/node/commit/ddf0e17a95)] - **doc,tools**: switch to `@node-core/utils` (Michaël Zasso) [#49851](https://github.com/nodejs/node/pull/49851)
+* \[[`142e256fc5`](https://github.com/nodejs/node/commit/142e256fc5)] - **errors**: improve classRegExp in errors.js (Uzlopak) [#49643](https://github.com/nodejs/node/pull/49643)
+* \[[`6377f1bce2`](https://github.com/nodejs/node/commit/6377f1bce2)] - **errors**: use `determineSpecificType` in more error messages (Antoine du Hamel) [#49580](https://github.com/nodejs/node/pull/49580)
+* \[[`05f0fcb4c4`](https://github.com/nodejs/node/commit/05f0fcb4c4)] - **esm**: identify parent importing a url with invalid host (Jacob Smith) [#49736](https://github.com/nodejs/node/pull/49736)
+* \[[`8a6f5fb8f3`](https://github.com/nodejs/node/commit/8a6f5fb8f3)] - **esm**: fix return type of `import.meta.resolve` (Antoine du Hamel) [#49698](https://github.com/nodejs/node/pull/49698)
+* \[[`a6140f1b8c`](https://github.com/nodejs/node/commit/a6140f1b8c)] - **esm**: update loaders warning (Geoffrey Booth) [#49633](https://github.com/nodejs/node/pull/49633)
+* \[[`521a9327e0`](https://github.com/nodejs/node/commit/521a9327e0)] - **esm**: fix support for `URL` instances in `register` (Antoine du Hamel) [#49655](https://github.com/nodejs/node/pull/49655)
+* \[[`3a9ea0925a`](https://github.com/nodejs/node/commit/3a9ea0925a)] - **esm**: clarify ERR\_REQUIRE\_ESM errors (Daniel Compton) [#49521](https://github.com/nodejs/node/pull/49521)
+* \[[`1beefd5f16`](https://github.com/nodejs/node/commit/1beefd5f16)] - **esm**: set all hooks as release candidate (Geoffrey Booth) [#49597](https://github.com/nodejs/node/pull/49597)
+* \[[`be48267888`](https://github.com/nodejs/node/commit/be48267888)] - **esm**: remove return value for `Module.register` (Antoine du Hamel) [#49529](https://github.com/nodejs/node/pull/49529)
+* \[[`e74a075124`](https://github.com/nodejs/node/commit/e74a075124)] - **esm**: refactor test-esm-loader-resolve-type (Geoffrey Booth) [#49493](https://github.com/nodejs/node/pull/49493)
+* \[[`17823b3533`](https://github.com/nodejs/node/commit/17823b3533)] - **esm**: refactor test-esm-named-exports (Geoffrey Booth) [#49493](https://github.com/nodejs/node/pull/49493)
+* \[[`f34bd15ac1`](https://github.com/nodejs/node/commit/f34bd15ac1)] - **esm**: refactor mocking test (Geoffrey Booth) [#49465](https://github.com/nodejs/node/pull/49465)
+* \[[`9363179b39`](https://github.com/nodejs/node/commit/9363179b39)] - **fs**: replace `SetMethodNoSideEffect` in node\_file (CanadaHonk) [#49857](https://github.com/nodejs/node/pull/49857)
+* \[[`e82e46ce81`](https://github.com/nodejs/node/commit/e82e46ce81)] - **fs**: improve error performance for `unlinkSync` (CanadaHonk) [#49856](https://github.com/nodejs/node/pull/49856)
+* \[[`07d05185e2`](https://github.com/nodejs/node/commit/07d05185e2)] - **fs**: improve `readFileSync` with file descriptors (Yagiz Nizipli) [#49691](https://github.com/nodejs/node/pull/49691)
+* \[[`835f9fe7b9`](https://github.com/nodejs/node/commit/835f9fe7b9)] - **fs**: fix file descriptor validator (Yagiz Nizipli) [#49752](https://github.com/nodejs/node/pull/49752)
+* \[[`b618fe262f`](https://github.com/nodejs/node/commit/b618fe262f)] - **fs**: improve error performance of `opendirSync` (Yagiz Nizipli) [#49705](https://github.com/nodejs/node/pull/49705)
+* \[[`938471ef55`](https://github.com/nodejs/node/commit/938471ef55)] - **fs**: improve error performance of sync methods (Yagiz Nizipli) [#49593](https://github.com/nodejs/node/pull/49593)
+* \[[`db3fc6d087`](https://github.com/nodejs/node/commit/db3fc6d087)] - **fs**: fix readdir and opendir recursive with unknown file types (William Marlow) [#49603](https://github.com/nodejs/node/pull/49603)
+* \[[`0f020ed22d`](https://github.com/nodejs/node/commit/0f020ed22d)] - **gyp**: put cctest filenames in variables (Cheng Zhao) [#49178](https://github.com/nodejs/node/pull/49178)
+* \[[`0ce1e94d12`](https://github.com/nodejs/node/commit/0ce1e94d12)] - **lib**: update encoding sets in `WHATWG API` (Jungku Lee) [#49610](https://github.com/nodejs/node/pull/49610)
+* \[[`efd6815a7a`](https://github.com/nodejs/node/commit/efd6815a7a)] - **lib**: fix `internalBinding` typings (Yagiz Nizipli) [#49742](https://github.com/nodejs/node/pull/49742)
+* \[[`1287d5b74e`](https://github.com/nodejs/node/commit/1287d5b74e)] - **lib**: allow byob reader for 'blob.stream()' (Debadree Chatterjee) [#49713](https://github.com/nodejs/node/pull/49713)
+* \[[`bbc710522d`](https://github.com/nodejs/node/commit/bbc710522d)] - **lib**: reset the cwd cache before execution (Maël Nison) [#49684](https://github.com/nodejs/node/pull/49684)
+* \[[`f62d649e4d`](https://github.com/nodejs/node/commit/f62d649e4d)] - **lib**: use internal `fileURLToPath` (Deokjin Kim) [#49558](https://github.com/nodejs/node/pull/49558)
+* \[[`e515046941`](https://github.com/nodejs/node/commit/e515046941)] - **lib**: use internal `pathToFileURL` (Livia Medeiros) [#49553](https://github.com/nodejs/node/pull/49553)
+* \[[`00608e8070`](https://github.com/nodejs/node/commit/00608e8070)] - **lib**: check SharedArrayBuffer availability in freeze\_intrinsics.js (Milan Burda) [#49482](https://github.com/nodejs/node/pull/49482)
+* \[[`8bfbe7079c`](https://github.com/nodejs/node/commit/8bfbe7079c)] - **meta**: fix linter error (Antoine du Hamel) [#49755](https://github.com/nodejs/node/pull/49755)
+* \[[`58f7a9e096`](https://github.com/nodejs/node/commit/58f7a9e096)] - **meta**: add primordials strategic initiative (Benjamin Gruenbaum) [#49706](https://github.com/nodejs/node/pull/49706)
+* \[[`5366027756`](https://github.com/nodejs/node/commit/5366027756)] - **meta**: bump github/codeql-action from 2.21.2 to 2.21.5 (dependabot\[bot]) [#49438](https://github.com/nodejs/node/pull/49438)
+* \[[`fe26b74082`](https://github.com/nodejs/node/commit/fe26b74082)] - **meta**: bump rtCamp/action-slack-notify from 2.2.0 to 2.2.1 (dependabot\[bot]) [#49437](https://github.com/nodejs/node/pull/49437)
+* \[[`96009289fb`](https://github.com/nodejs/node/commit/96009289fb)] - **node-api**: enable uncaught exceptions policy by default (Chengzhong Wu) [#49313](https://github.com/nodejs/node/pull/49313)
+* \[[`f40b5ede02`](https://github.com/nodejs/node/commit/f40b5ede02)] - **perf\_hooks**: reduce overhead of new performance\_entries (Vinicius Lourenço) [#49803](https://github.com/nodejs/node/pull/49803)
+* \[[`ad043bac31`](https://github.com/nodejs/node/commit/ad043bac31)] - **process**: add custom dir support for heapsnapshot-signal (Jithil P Ponnan) [#47854](https://github.com/nodejs/node/pull/47854)
+* \[[`8a7c10194c`](https://github.com/nodejs/node/commit/8a7c10194c)] - **repl**: don't accumulate excess indentation in .load (Daniel X Moore) [#49461](https://github.com/nodejs/node/pull/49461)
+* \[[`10a2adeed5`](https://github.com/nodejs/node/commit/10a2adeed5)] - **src**: improve error message when ICU data cannot be initialized (Joyee Cheung) [#49666](https://github.com/nodejs/node/pull/49666)
+* \[[`ce37688bac`](https://github.com/nodejs/node/commit/ce37688bac)] - **src**: remove unnecessary todo (Rafael Gonzaga) [#49227](https://github.com/nodejs/node/pull/49227)
+* \[[`f611583b71`](https://github.com/nodejs/node/commit/f611583b71)] - **src**: use SNAPSHOT\_SERDES to log snapshot ser/deserialization (Joyee Cheung) [#49637](https://github.com/nodejs/node/pull/49637)
+* \[[`a597cb8457`](https://github.com/nodejs/node/commit/a597cb8457)] - **src**: port Pipe to uv\_pipe\_bind2, uv\_pipe\_connect2 (Geoff Goodman) [#49667](https://github.com/nodejs/node/pull/49667)
+* \[[`fb21062338`](https://github.com/nodejs/node/commit/fb21062338)] - **src**: set --rehash-snapshot explicitly (Joyee Cheung) [#49556](https://github.com/nodejs/node/pull/49556)
+* \[[`14ece0aa76`](https://github.com/nodejs/node/commit/14ece0aa76)] - **src**: allow embedders to override NODE\_MODULE\_VERSION (Cheng Zhao) [#49279](https://github.com/nodejs/node/pull/49279)
+* \[[`4b5e23c71b`](https://github.com/nodejs/node/commit/4b5e23c71b)] - **src**: set ModuleWrap internal fields only once (Joyee Cheung) [#49391](https://github.com/nodejs/node/pull/49391)
+* \[[`2d3f5c7cab`](https://github.com/nodejs/node/commit/2d3f5c7cab)] - **src**: fix fs\_type\_to\_name default value (Mustafa Ateş Uzun) [#49239](https://github.com/nodejs/node/pull/49239)
+* \[[`cfbcb1059c`](https://github.com/nodejs/node/commit/cfbcb1059c)] - **src**: fix comment on StreamResource (rogertyang) [#49193](https://github.com/nodejs/node/pull/49193)
+* \[[`39fb83ad16`](https://github.com/nodejs/node/commit/39fb83ad16)] - **src**: do not rely on the internal field being default to undefined (Joyee Cheung) [#49413](https://github.com/nodejs/node/pull/49413)
+* \[[`12fe6a0bfd`](https://github.com/nodejs/node/commit/12fe6a0bfd)] - **stream**: use bitmap in writable state (Raz Luvaton) [#49834](https://github.com/nodejs/node/pull/49834)
+* \[[`0ccd4638ac`](https://github.com/nodejs/node/commit/0ccd4638ac)] - **stream**: use bitmap in readable state (Benjamin Gruenbaum) [#49745](https://github.com/nodejs/node/pull/49745)
+* \[[`b29d927010`](https://github.com/nodejs/node/commit/b29d927010)] - **stream**: improve readable webstream `pipeTo` (Raz Luvaton) [#49690](https://github.com/nodejs/node/pull/49690)
+* \[[`7c5e322346`](https://github.com/nodejs/node/commit/7c5e322346)] - **stream**: improve webstream readable async iterator performance (Raz Luvaton) [#49662](https://github.com/nodejs/node/pull/49662)
+* \[[`1a180342ec`](https://github.com/nodejs/node/commit/1a180342ec)] - **test**: add os setPriority, getPriority test coverage (Wael) [#38771](https://github.com/nodejs/node/pull/38771)
+* \[[`d44a812101`](https://github.com/nodejs/node/commit/d44a812101)] - **test**: deflake test-runner-output (Moshe Atlow) [#49878](https://github.com/nodejs/node/pull/49878)
+* \[[`d2bcdcb177`](https://github.com/nodejs/node/commit/d2bcdcb177)] - **test**: mark test-runner-output as flaky (Joyee Cheung) [#49854](https://github.com/nodejs/node/pull/49854)
+* \[[`eded29c522`](https://github.com/nodejs/node/commit/eded29c522)] - **test**: use mustSucceed instead of mustCall (SiddharthDevulapalli) [#49788](https://github.com/nodejs/node/pull/49788)
+* \[[`3db9b40081`](https://github.com/nodejs/node/commit/3db9b40081)] - **test**: refactor test-readline-async-iterators into a benchmark (Shubham Pandey) [#49237](https://github.com/nodejs/node/pull/49237)
+* \[[`2cc5ad7859`](https://github.com/nodejs/node/commit/2cc5ad7859)] - _**Revert**_ "**test**: mark test-http-regr-gh-2928 as flaky" (Luigi Pinca) [#49708](https://github.com/nodejs/node/pull/49708)
+* \[[`e5185b053c`](https://github.com/nodejs/node/commit/e5185b053c)] - **test**: use `fs.constants` for `fs.access` constants (Livia Medeiros) [#49685](https://github.com/nodejs/node/pull/49685)
+* \[[`b9e5b43462`](https://github.com/nodejs/node/commit/b9e5b43462)] - **test**: deflake test-http-regr-gh-2928 (Luigi Pinca) [#49574](https://github.com/nodejs/node/pull/49574)
+* \[[`1fffda504e`](https://github.com/nodejs/node/commit/1fffda504e)] - **test**: fix argument computation in embedtest (Joyee Cheung) [#49506](https://github.com/nodejs/node/pull/49506)
+* \[[`6e56f2db52`](https://github.com/nodejs/node/commit/6e56f2db52)] - **test**: skip test-child-process-stdio-reuse-readable-stdio on Windows (Joyee Cheung) [#49621](https://github.com/nodejs/node/pull/49621)
+* \[[`ab3afb330d`](https://github.com/nodejs/node/commit/ab3afb330d)] - **test**: mark test-runner-watch-mode as flaky (Joyee Cheung) [#49627](https://github.com/nodejs/node/pull/49627)
+* \[[`185d9b50db`](https://github.com/nodejs/node/commit/185d9b50db)] - **test**: deflake test-tls-socket-close (Luigi Pinca) [#49575](https://github.com/nodejs/node/pull/49575)
+* \[[`c70c74a9e6`](https://github.com/nodejs/node/commit/c70c74a9e6)] - **test**: show more info on failure in test-cli-syntax-require.js (Joyee Cheung) [#49561](https://github.com/nodejs/node/pull/49561)
+* \[[`ed7c6d1114`](https://github.com/nodejs/node/commit/ed7c6d1114)] - **test**: mark test-http-regr-gh-2928 as flaky (Joyee Cheung) [#49565](https://github.com/nodejs/node/pull/49565)
+* \[[`3599eebab9`](https://github.com/nodejs/node/commit/3599eebab9)] - **test**: use spawnSyncAndExitWithoutError in sea tests (Joyee Cheung) [#49543](https://github.com/nodejs/node/pull/49543)
+* \[[`f79b153e89`](https://github.com/nodejs/node/commit/f79b153e89)] - **test**: use spawnSyncAndExitWithoutError in test/common/sea.js (Joyee Cheung) [#49543](https://github.com/nodejs/node/pull/49543)
+* \[[`c079c73769`](https://github.com/nodejs/node/commit/c079c73769)] - **test**: use setImmediate() in test-heapdump-shadowrealm.js (Joyee Cheung) [#49573](https://github.com/nodejs/node/pull/49573)
+* \[[`667a92493c`](https://github.com/nodejs/node/commit/667a92493c)] - **test**: skip test-child-process-pipe-dataflow\.js on Windows (Joyee Cheung) [#49563](https://github.com/nodejs/node/pull/49563)
+* \[[`91af0a9a3c`](https://github.com/nodejs/node/commit/91af0a9a3c)] - _**Revert**_ "**test**: ignore the copied entry\_point.c" (Chengzhong Wu) [#49515](https://github.com/nodejs/node/pull/49515)
+* \[[`567afc71b8`](https://github.com/nodejs/node/commit/567afc71b8)] - **test**: avoid copying test source files (Chengzhong Wu) [#49515](https://github.com/nodejs/node/pull/49515)
+* \[[`ced25a976d`](https://github.com/nodejs/node/commit/ced25a976d)] - **test**: increase coverage of `Module.register` and `initialize` hook (Antoine du Hamel) [#49532](https://github.com/nodejs/node/pull/49532)
+* \[[`be02fbdb8a`](https://github.com/nodejs/node/commit/be02fbdb8a)] - **test**: isolate `globalPreload` tests (Geoffrey Booth) [#49545](https://github.com/nodejs/node/pull/49545)
+* \[[`f214428845`](https://github.com/nodejs/node/commit/f214428845)] - **test**: split test-crypto-dh to avoid timeout on slow machines in the CI (Joyee Cheung) [#49492](https://github.com/nodejs/node/pull/49492)
+* \[[`3987094569`](https://github.com/nodejs/node/commit/3987094569)] - **test**: make `test-dotenv-node-options` locale-independent (Livia Medeiros) [#49470](https://github.com/nodejs/node/pull/49470)
+* \[[`34c1741792`](https://github.com/nodejs/node/commit/34c1741792)] - **test**: add test for urlstrings usage in `node:fs` (Livia Medeiros) [#49471](https://github.com/nodejs/node/pull/49471)
+* \[[`c3c6c4f007`](https://github.com/nodejs/node/commit/c3c6c4f007)] - **test**: make test-worker-prof more robust (Joyee Cheung) [#49274](https://github.com/nodejs/node/pull/49274)
+* \[[`843df1a4da`](https://github.com/nodejs/node/commit/843df1a4da)] - **test,crypto**: update WebCryptoAPI WPT (Filip Skokan) [#49714](https://github.com/nodejs/node/pull/49714)
+* \[[`80b342cc38`](https://github.com/nodejs/node/commit/80b342cc38)] - **test\_runner**: accept `testOnly` in `run` (Moshe Atlow) [#49753](https://github.com/nodejs/node/pull/49753)
+* \[[`76865515b9`](https://github.com/nodejs/node/commit/76865515b9)] - **test\_runner**: fix test runner watch mode when no positional arguments (Moshe Atlow) [#49578](https://github.com/nodejs/node/pull/49578)
+* \[[`17a05b141d`](https://github.com/nodejs/node/commit/17a05b141d)] - **test\_runner**: add junit reporter (Moshe Atlow) [#49614](https://github.com/nodejs/node/pull/49614)
+* \[[`5672e38457`](https://github.com/nodejs/node/commit/5672e38457)] - **test\_runner**: add jsdocs to mock.js (Caio Borghi) [#49555](https://github.com/nodejs/node/pull/49555)
+* \[[`b4d42a8f2b`](https://github.com/nodejs/node/commit/b4d42a8f2b)] - **test\_runner**: fix invalid timer call (Erick Wendel) [#49477](https://github.com/nodejs/node/pull/49477)
+* \[[`f755e6786b`](https://github.com/nodejs/node/commit/f755e6786b)] - **test\_runner**: add jsdocs to MockTimers (Erick Wendel) [#49476](https://github.com/nodejs/node/pull/49476)
+* \[[`e7285d4bf0`](https://github.com/nodejs/node/commit/e7285d4bf0)] - **test\_runner**: fix typescript coverage (Moshe Atlow) [#49406](https://github.com/nodejs/node/pull/49406)
+* \[[`6db0d3d883`](https://github.com/nodejs/node/commit/6db0d3d883)] - **tools**: support updating @reporters/github manually (Moshe Atlow) [#49871](https://github.com/nodejs/node/pull/49871)
+* \[[`5ac6722031`](https://github.com/nodejs/node/commit/5ac6722031)] - **tools**: skip ruff on tools/node\_modules (Moshe Atlow) [#49838](https://github.com/nodejs/node/pull/49838)
+* \[[`462228bd24`](https://github.com/nodejs/node/commit/462228bd24)] - **tools**: fix uvwasi updater (Michael Dawson) [#49682](https://github.com/nodejs/node/pull/49682)
+* \[[`ff81bfb958`](https://github.com/nodejs/node/commit/ff81bfb958)] - **tools**: update lint-md-dependencies to rollup\@3.29.2 (Node.js GitHub Bot) [#49679](https://github.com/nodejs/node/pull/49679)
+* \[[`08ffc6344c`](https://github.com/nodejs/node/commit/08ffc6344c)] - **tools**: restrict internal code from using public `url` module (LiviaMedeiros) [#49590](https://github.com/nodejs/node/pull/49590)
+* \[[`728ebf6c97`](https://github.com/nodejs/node/commit/728ebf6c97)] - **tools**: update eslint to 8.49.0 (Node.js GitHub Bot) [#49586](https://github.com/nodejs/node/pull/49586)
+* \[[`20d038ffb1`](https://github.com/nodejs/node/commit/20d038ffb1)] - **tools**: update lint-md-dependencies to rollup\@3.29.0 unified\@11.0.3 (Node.js GitHub Bot) [#49584](https://github.com/nodejs/node/pull/49584)
+* \[[`210c15bd12`](https://github.com/nodejs/node/commit/210c15bd12)] - **tools**: allow passing absolute path of config.gypi in js2c (Cheng Zhao) [#49162](https://github.com/nodejs/node/pull/49162)
+* \[[`e341efe173`](https://github.com/nodejs/node/commit/e341efe173)] - **tools**: configure never-stale label correctly (Michaël Zasso) [#49498](https://github.com/nodejs/node/pull/49498)
+* \[[`a8a8a498ce`](https://github.com/nodejs/node/commit/a8a8a498ce)] - **tools**: update doc dependencies (Node.js GitHub Bot) [#49467](https://github.com/nodejs/node/pull/49467)
+* \[[`ac06607f9e`](https://github.com/nodejs/node/commit/ac06607f9e)] - **typings**: fix missing property in `ExportedHooks` (Antoine du Hamel) [#49567](https://github.com/nodejs/node/pull/49567)
+* \[[`7d91a73764`](https://github.com/nodejs/node/commit/7d91a73764)] - **url**: improve invalid url performance (Yagiz Nizipli) [#49692](https://github.com/nodejs/node/pull/49692)
+* \[[`7c2060cfac`](https://github.com/nodejs/node/commit/7c2060cfac)] - **util**: add `getCwdSafe` internal util fn (João Lenon) [#48434](https://github.com/nodejs/node/pull/48434)
+* \[[`c23c60f545`](https://github.com/nodejs/node/commit/c23c60f545)] - **zlib**: disable CRC32 SIMD optimization (Luigi Pinca) [#49511](https://github.com/nodejs/node/pull/49511)
+
## 2023-09-18, Version 20.7.0 (Current), @UlisesGascon
diff --git a/doc/contributing/backporting-to-release-lines.md b/doc/contributing/backporting-to-release-lines.md
index d9dea5dbe2b06f..851e4e255442d1 100644
--- a/doc/contributing/backporting-to-release-lines.md
+++ b/doc/contributing/backporting-to-release-lines.md
@@ -40,10 +40,26 @@ For the following labels, the `N` in `vN.x` refers to the major release number.
## How to submit a backport pull request
-For the following steps, let's assume that a backport is needed for the v10.x
-release line. All commands will use the `v10.x-staging` branch as the target
-branch. In order to submit a backport pull request to another branch, simply
-replace that with the staging branch for the targeted release line.
+For the following steps, let's assume that you need to backport PR `123`
+to the v20.x release line. All commands will use the `v20.x-staging` branch
+as the target branch. In order to submit a backport pull request to another
+branch, simply replace that with the staging branch for the targeted release
+line.
+
+### Automated
+
+1. Make sure you have [`@node-core/utils`][] installed
+
+2. Run the [`git node backport`][] command
+
+```bash
+# Backport PR 123 to v20.x-staging
+git node backport 123 --to=20
+```
+
+3. Jump to step 5 in the Manual section below
+
+### Manual
1. Checkout the staging branch for the targeted release line.
@@ -56,10 +72,10 @@ replace that with the staging branch for the targeted release line.
# the origin remote points to your fork, and the upstream remote points
# to git@github.com:nodejs/node.git
cd $NODE_DIR
- # If v10.x-staging is checked out `pull` should be used instead of `fetch`
- git fetch upstream v10.x-staging:v10.x-staging -f
+ # If v20.x-staging is checked out `pull` should be used instead of `fetch`
+ git fetch upstream v20.x-staging:v20.x-staging -f
# Assume we want to backport PR #10157
- git checkout -b backport-10157-to-v10.x v10.x-staging
+ git checkout -b backport-10157-to-v20.x v20.x-staging
# Ensure there are no test artifacts from previous builds
# Note that this command deletes all files and directories
# not under revision control below the ./test directory.
@@ -93,10 +109,10 @@ replace that with the staging branch for the targeted release line.
8. Push the changes to your fork.
9. Open a pull request:
- 1. Be sure to target the `v10.x-staging` branch in the pull request.
+ 1. Be sure to target the `v20.x-staging` branch in the pull request.
2. Include the backport target in the pull request title in the following
- format: `[v10.x backport] `.
- Example: `[v10.x backport] process: improve performance of nextTick`
+ format: `[v20.x backport] `.
+ Example: `[v20.x backport] process: improve performance of nextTick`
3. Check the checkbox labeled "Allow edits and access to secrets by
maintainers".
4. In the description add a reference to the original pull request.
@@ -105,15 +121,17 @@ replace that with the staging branch for the targeted release line.
6. Run a [`node-test-pull-request`][] CI job (with `REBASE_ONTO` set to the
default ``)
-10. Replace the `backport-requested-v10.x` label on the original pull request
- with `backport-open-v10.x`.
+10. Replace the `backport-requested-v20.x` label on the original pull request
+ with `backport-open-v20.x`.
11. If during the review process conflicts arise, use the following to rebase:
- `git pull --rebase upstream v10.x-staging`
+ `git pull --rebase upstream v20.x-staging`
-After the pull request lands, replace the `backport-open-v10.x` label on the
-original pull request with `backported-to-v10.x`.
+After the pull request lands, replace the `backport-open-v20.x` label on the
+original pull request with `backported-to-v20.x`.
[Release Plan]: https://github.com/nodejs/Release#release-plan
[Release Schedule]: https://github.com/nodejs/Release#release-schedule
+[`@node-core/utils`]: https://github.com/nodejs/node-core-utils
+[`git node backport`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-backport
[`node-test-pull-request`]: https://ci.nodejs.org/job/node-test-pull-request/build
diff --git a/doc/contributing/collaborator-guide.md b/doc/contributing/collaborator-guide.md
index a4218acc7d4fd0..2ecf14a082dc3b 100644
--- a/doc/contributing/collaborator-guide.md
+++ b/doc/contributing/collaborator-guide.md
@@ -555,22 +555,23 @@ See the [commit queue guide][commit-queue.md].
### Using `git-node`
-In most cases, using [the `git-node` command][git-node] of [`node-core-utils`][]
-is enough to land a pull request. If you discover a problem when using
-this tool, please file an issue [to the issue tracker][node-core-utils-issues].
+In most cases, using [the `git-node` command][git-node] of
+[`@node-core/utils`][] is enough to land a pull request. If you discover a
+problem when using this tool, please file an issue
+[to the issue tracker][node-core-utils-issues].
Quick example:
```bash
-npm install -g node-core-utils
+npm install -g @node-core/utils
git node land $PRID
```
-To use `node-core-utils`, you will need a GitHub access token. If you do not
-have one, `node-core-utils` will create one for you the first time you use it.
+To use `@node-core/utils`, you will need a GitHub access token. If you do not
+have one, `@node-core/utils` will create one for you the first time you use it.
To do this, it will ask for your GitHub password and two-factor authentication
code. If you wish to create the token yourself in advance, see
-[the `node-core-utils` guide][node-core-utils-credentials].
+[the `@node-core/utils` guide][node-core-utils-credentials].
### Technical HOWTO
@@ -959,7 +960,7 @@ need to be attached anymore, as only important bugfixes will be included.
[TSC]: https://github.com/nodejs/TSC
[`--pending-deprecation`]: ../api/cli.md#--pending-deprecation
[`--throw-deprecation`]: ../api/cli.md#--throw-deprecation
-[`node-core-utils`]: https://github.com/nodejs/node-core-utils
+[`@node-core/utils`]: https://github.com/nodejs/node-core-utils
[backporting guide]: backporting-to-release-lines.md
[commit message guidelines]: pull-requests.md#commit-message-guidelines
[commit-example]: https://github.com/nodejs/node/commit/b636ba8186
diff --git a/doc/contributing/commit-queue.md b/doc/contributing/commit-queue.md
index 4730d0889e99aa..cece9ea84e94f8 100644
--- a/doc/contributing/commit-queue.md
+++ b/doc/contributing/commit-queue.md
@@ -7,8 +7,8 @@ _tl;dr: You can land pull requests by adding the `commit-queue` label to it._
Commit Queue is an experimental feature for the project which simplifies the
landing process by automating it via GitHub Actions. With it, collaborators can
land pull requests by adding the `commit-queue` label to a PR. All
-checks will run via node-core-utils, and if the pull request is ready to land,
-the Action will rebase it and push to `main`.
+checks will run via `@node-core/utils`, and if the pull request is ready to
+land, the Action will rebase it and push to `main`.
This document gives an overview of how the Commit Queue works, as well as
implementation details, reasoning for design choices, and current limitations.
@@ -76,7 +76,7 @@ reasons:
commit, meaning we wouldn't be able to use it for already opened PRs
without rebasing them first.
-`node-core-utils` is configured with a personal token and
+`@node-core/utils` is configured with a personal token and
a Jenkins token from
[@nodejs-github-bot](https://github.com/nodejs/github-bot).
`octokit/graphql-action` is used to fetch all pull requests with the
diff --git a/doc/contributing/maintaining/maintaining-V8.md b/doc/contributing/maintaining/maintaining-V8.md
index 0fe9b393fc0f15..740af7f228f694 100644
--- a/doc/contributing/maintaining/maintaining-V8.md
+++ b/doc/contributing/maintaining/maintaining-V8.md
@@ -122,7 +122,7 @@ some manual steps and is recommended.
Here are the steps for the bug mentioned above:
-1. Install `git-node` by installing [`node-core-utils`][].
+1. Install `git-node` by installing [`@node-core/utils`][].
2. Install the prerequisites for [`git-node-v8`][].
3. Find the commit hash linked-to in the issue (in this case a51f429).
4. Checkout a branch off the appropriate _vY.x-staging_ branch (e.g.
@@ -277,7 +277,7 @@ that Node.js may be floating (or else cause a merge conflict).
#### Applying minor updates with `git-node` (recommended)
-1. Install [`git-node`][] by installing [`node-core-utils`][].
+1. Install [`git-node`][] by installing [`@node-core/utils`][].
2. Install the prerequisites for [`git-node-v8`][].
3. Run `git node v8 minor` to apply a minor update.
@@ -384,8 +384,8 @@ This would require some tooling to:
[V8MergingPatching]: https://v8.dev/docs/merge-patch
[V8TemplateMergeRequest]: https://bugs.chromium.org/p/v8/issues/entry?template=Node.js%20merge%20request
[V8TemplateUpstreamBug]: https://bugs.chromium.org/p/v8/issues/entry?template=Node.js%20upstream%20bug
+[`@node-core/utils`]: https://github.com/nodejs/node-core-utils#Install
[`git-node-v8-backport`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-v8-backport-sha
[`git-node-v8-minor`]: https://github.com/nodejs/node-core-utils/blob/main/docs/git-node.md#git-node-v8-minor
[`git-node-v8`]: https://github.com/nodejs/node-core-utils/blob/HEAD/docs/git-node.md#git-node-v8
[`git-node`]: https://github.com/nodejs/node-core-utils/blob/HEAD/docs/git-node.md#git-node-v8
-[`node-core-utils`]: https://github.com/nodejs/node-core-utils#Install
diff --git a/doc/contributing/maintaining/maintaining-dependencies.md b/doc/contributing/maintaining/maintaining-dependencies.md
index 007a27709d829b..6f84a92ebb9385 100644
--- a/doc/contributing/maintaining/maintaining-dependencies.md
+++ b/doc/contributing/maintaining/maintaining-dependencies.md
@@ -15,20 +15,20 @@ This a list of all the dependencies:
* [c-ares 1.19.0][]
* [cjs-module-lexer 1.2.2][]
* [corepack][]
-* [googletest 7e33b6a][]
+* [googletest d1467f5][]
* [histogram 0.11.8][]
* [icu-small 73.2][]
* [libuv 1.46.0][]
* [llhttp 8.1.0][]
* [minimatch 9.0.3][]
-* [nghttp2 1.55.1][]
+* [nghttp2 1.56.0][]
* [nghttp3 0.7.0][]
* [ngtcp2 0.8.1][]
* [npm 9.6.7][]
* [openssl 3.0.8][]
* [postject 1.0.0-alpha.6][]
* [simdutf 3.2.17][]
-* [undici 5.23.0][]
+* [undici 5.25.2][]
* [uvwasi 0.0.16][]
* [V8 11.3.244.8][]
* [zlib 1.2.13.1-motley-f5fd0ad][]
@@ -189,7 +189,7 @@ In practical terms, Corepack will let you use Yarn and pnpm without having to
install them - just like what currently happens with npm, which is shipped
by Node.js by default.
-### googletest 7e33b6a
+### googletest d1467f5
The [googletest](https://github.com/google/googletest) dependency is Google’s
C++ testing and mocking framework.
@@ -223,7 +223,7 @@ See [maintaining-http][] for more informations.
The [minimatch](https://github.com/isaacs/minimatch) dependency is a
minimal matching utility.
-### nghttp2 1.55.1
+### nghttp2 1.56.0
The [nghttp2](https://github.com/nghttp2/nghttp2) dependency is a C library
implementing HTTP/2 protocol.
@@ -291,7 +291,7 @@ The [postject](https://github.com/nodejs/postject) dependency is used for the
The [simdutf](https://github.com/simdutf/simdutf) dependency is
a C++ library for fast UTF-8 decoding and encoding.
-### undici 5.23.0
+### undici 5.25.2
The [undici](https://github.com/nodejs/undici) dependency is an HTTP/1.1 client,
written from scratch for Node.js..
@@ -326,7 +326,7 @@ performance improvements not currently available in standard zlib.
[cjs-module-lexer 1.2.2]: #cjs-module-lexer-122
[corepack]: #corepack
[dependency-update-action]: ../../../.github/workflows/tools.yml
-[googletest 7e33b6a]: #googletest-7e33b6a
+[googletest d1467f5]: #googletest-d1467f5
[histogram 0.11.8]: #histogram-0118
[icu-small 73.2]: #icu-small-732
[libuv 1.46.0]: #libuv-1460
@@ -338,14 +338,14 @@ performance improvements not currently available in standard zlib.
[maintaining-openssl]: ./maintaining-openssl.md
[maintaining-web-assembly]: ./maintaining-web-assembly.md
[minimatch 9.0.3]: #minimatch-903
-[nghttp2 1.55.1]: #nghttp2-1551
+[nghttp2 1.56.0]: #nghttp2-1560
[nghttp3 0.7.0]: #nghttp3-070
[ngtcp2 0.8.1]: #ngtcp2-081
[npm 9.6.7]: #npm-967
[openssl 3.0.8]: #openssl-308
[postject 1.0.0-alpha.6]: #postject-100-alpha6
[simdutf 3.2.17]: #simdutf-3217
-[undici 5.23.0]: #undici-5230
+[undici 5.25.2]: #undici-5252
[update-openssl-action]: ../../../.github/workflows/update-openssl.yml
[uvwasi 0.0.16]: #uvwasi-0016
[v8 11.3.244.8]: #v8-1132448
diff --git a/doc/contributing/pull-requests.md b/doc/contributing/pull-requests.md
index c35fda734f3173..a2bd63d3572393 100644
--- a/doc/contributing/pull-requests.md
+++ b/doc/contributing/pull-requests.md
@@ -34,9 +34,11 @@
## Dependencies
Node.js has several bundled dependencies in the _deps/_ and the _tools/_
-directories that are not part of the project proper. Changes to files in those
-directories should be sent to their respective projects. Do not send a patch to
-Node.js. We cannot accept such patches.
+directories that are not part of the project proper.
+These are detailed in the [maintaining dependencies][] document.
+Changes to files in those directories should be sent
+to their respective projects.
+Do not send a patch to Node.js. We cannot accept such patches.
In case of doubt, open an issue in the
[issue tracker](https://github.com/nodejs/node/issues/) or contact one of the
@@ -590,6 +592,7 @@ More than one subsystem may be valid for any particular issue or pull request.
[guide for writing tests in Node.js]: writing-tests.md
[hiding-a-comment]: https://help.github.com/articles/managing-disruptive-comments/#hiding-a-comment
[https://ci.nodejs.org/]: https://ci.nodejs.org/
+[maintaining dependencies]: ./maintaining/maintaining-dependencies.md
[nodejs/core-validate-commit]: https://github.com/nodejs/core-validate-commit/blob/main/lib/rules/subsystem.js
[pull request template]: https://raw.githubusercontent.com/nodejs/node/HEAD/.github/PULL_REQUEST_TEMPLATE.md
[running tests]: ../../BUILDING.md#running-tests
diff --git a/doc/contributing/releases.md b/doc/contributing/releases.md
index 0020ec59c8bd02..bf34bff850c678 100644
--- a/doc/contributing/releases.md
+++ b/doc/contributing/releases.md
@@ -563,7 +563,7 @@ ecosystem.
Use `ncu-ci` to compare `vx.x` run (10) and proposal branch (11)
```bash
-npm i -g node-core-utils
+npm i -g @node-core/utils
ncu-ci citgm 10 11
```
@@ -1001,9 +1001,13 @@ This script will use the promoted builds and changelog to generate the post. Run
Refs:
```
+* In order to trigger the CI Checks of the [nodejs.org repository][]; Please
+ attach the `github_actions:pull-request` label to the PR.
+
* Changes to the base branch, `main`, on the [nodejs.org repository][] will
- trigger a new build of nodejs.org so your changes should appear a few minutes
- after pushing.
+ trigger a new build of nodejs.org, so your changes should appear a few minutes
+ after pushing. You can follow the [Deployments](https://github.com/nodejs/nodejs.org/deployments) page
+ to see when the build finishes and gets published.
### 18. Create the release on GitHub
@@ -1048,7 +1052,7 @@ _In whatever form you do this..._
### Marking a release line as LTS
The process of marking a release line as LTS has been automated using
-[node-core-utils](https://github.com/nodejs/node-core-utils).
+[`@node-core/utils`](https://github.com/nodejs/node-core-utils).
Start by checking out the staging branch for the release line that is going to
be marked as LTS, e.g:
@@ -1057,10 +1061,10 @@ be marked as LTS, e.g:
git checkout v1.x-staging
```
-Next, make sure you have **node-core-utils** installed:
+Next, make sure you have **`@node-core/utils`** installed:
```bash
-npm i -g node-core-utils
+npm i -g @node-core/utils
```
Run the prepare LTS release command:
@@ -1106,7 +1110,7 @@ current LTS codename in its release line changelog file.
The `test/parallel/test-process-release.js` file might also need to be updated.
-In case you can not run the automated `node-core-utils` command and you are
+In case you can not run the automated `@node-core/utils` command and you are
currently running these steps manually it's a good idea to refer to previous
LTS proposal PRs and make sure all required changes are covered.
diff --git a/doc/contributing/strategic-initiatives.md b/doc/contributing/strategic-initiatives.md
index b005040d54b479..ca4308daa3ebb8 100644
--- a/doc/contributing/strategic-initiatives.md
+++ b/doc/contributing/strategic-initiatives.md
@@ -6,16 +6,17 @@ agenda to ensure they are active and have the support they need.
## Current initiatives
-| Initiative | Champion | Links |
-| ---------------------- | --------------------------- | --------------------------------------------- |
-| Core Promise APIs | [Antoine du Hamel][aduh95] | |
-| QUIC / HTTP3 | [James M Snell][jasnell] | |
-| Shadow Realm | [Chengzhong Wu][legendecas] | |
-| Startup Snapshot | [Joyee Cheung][joyeecheung] | |
-| V8 Currency | [Michaël Zasso][targos] | |
-| Next-10 | [Michael Dawson][mhdawson] | |
-| Single executable apps | [Darshan Sen][RaisinTen] | |
-| Performance | [Yagiz Nizipli][anonrig] | |
+| Initiative | Champion | Links |
+| ---------------------- | -------------------------------- | ------------------------------------------------- |
+| Core Promise APIs | [Antoine du Hamel][aduh95] | |
+| QUIC / HTTP3 | [James M Snell][jasnell] | |
+| Shadow Realm | [Chengzhong Wu][legendecas] | |
+| Startup Snapshot | [Joyee Cheung][joyeecheung] | |
+| V8 Currency | [Michaël Zasso][targos] | |
+| Next-10 | [Michael Dawson][mhdawson] | |
+| Single executable apps | [Darshan Sen][RaisinTen] | |
+| Performance | | |
+| Primordials | [Benjamin Gruenbaum][benjamingr] | |
List of completed initiatives
@@ -40,7 +41,7 @@ agenda to ensure they are active and have the support they need.
[RaisinTen]: https://github.com/RaisinTen
[aduh95]: https://github.com/aduh95
-[anonrig]: https://github.com/anonrig
+[benjamingr]: https://github.com/benjamingr
[jasnell]: https://github.com/jasnell
[joyeecheung]: https://github.com/joyeecheung
[legendecas]: https://github.com/legendecas
diff --git a/lib/.eslintrc.yaml b/lib/.eslintrc.yaml
index cc4fa1975016eb..942d7cc1305eb4 100644
--- a/lib/.eslintrc.yaml
+++ b/lib/.eslintrc.yaml
@@ -175,6 +175,10 @@ rules:
message: Use `const { structuredClone } = require('internal/structured_clone');` instead of the global.
- name: SubtleCrypto
message: Use `const { SubtleCrypto } = require('internal/crypto/webcrypto');` instead of the global.
+ no-restricted-modules:
+ - error
+ - name: url
+ message: Require `internal/url` instead of `url`.
# Custom rules in tools/eslint-rules
node-core/avoid-prototype-pollution: error
node-core/lowercase-name-for-primitive: error
diff --git a/lib/fs.js b/lib/fs.js
index b17cf4f10cd3c1..29f356a57cd22e 100644
--- a/lib/fs.js
+++ b/lib/fs.js
@@ -131,7 +131,7 @@ const {
CHAR_BACKWARD_SLASH,
} = require('internal/constants');
const {
- isUint32,
+ isInt32,
parseFileMode,
validateBoolean,
validateBuffer,
@@ -141,7 +141,7 @@ const {
validateObject,
validateString,
} = require('internal/validators');
-const { readFileSyncUtf8 } = require('internal/fs/read/utf8');
+const syncFs = require('internal/fs/sync');
let truncateWarn = true;
let fs;
@@ -201,7 +201,7 @@ function makeStatsCallback(cb) {
};
}
-const isFd = isUint32;
+const isFd = isInt32;
function isFileType(stats, fileType) {
// Use stats array directly to avoid creating an fs.Stats instance just for
@@ -243,12 +243,7 @@ function access(path, mode, callback) {
* @returns {void}
*/
function accessSync(path, mode) {
- path = getValidatedPath(path);
- mode = getValidMode(mode, 'access');
-
- const ctx = { path };
- binding.access(pathModule.toNamespacedPath(path), mode, undefined, ctx);
- handleErrorFromBinding(ctx);
+ syncFs.access(path, mode);
}
/**
@@ -290,23 +285,7 @@ ObjectDefineProperty(exists, kCustomPromisifiedSymbol, {
* @returns {boolean}
*/
function existsSync(path) {
- try {
- path = getValidatedPath(path);
- } catch {
- return false;
- }
- const ctx = { path };
- const nPath = pathModule.toNamespacedPath(path);
- binding.access(nPath, F_OK, undefined, ctx);
-
- // In case of an invalid symlink, `binding.access()` on win32
- // will **not** return an error and is therefore not enough.
- // Double check with `binding.stat()`.
- if (isWindows && ctx.errno === undefined) {
- binding.stat(nPath, false, undefined, ctx);
- }
-
- return ctx.errno === undefined;
+ return syncFs.exists(path);
}
function readFileAfterOpen(err, fd) {
@@ -458,14 +437,11 @@ function tryReadSync(fd, isUserFd, buffer, pos, len) {
function readFileSync(path, options) {
options = getOptions(options, { flag: 'r' });
- const isUserFd = isFd(path); // File descriptor ownership
-
- // TODO(@anonrig): Do not handle file descriptor ownership for now.
- if (!isUserFd && (options.encoding === 'utf8' || options.encoding === 'utf-8')) {
- path = getValidatedPath(path);
- return readFileSyncUtf8(pathModule.toNamespacedPath(path), stringToFlags(options.flag));
+ if (options.encoding === 'utf8' || options.encoding === 'utf-8') {
+ return syncFs.readFileUtf8(path, options.flag);
}
+ const isUserFd = isFd(path); // File descriptor ownership
const fd = isUserFd ? path : fs.openSync(path, options.flag, 0o666);
const stats = tryStatSync(fd, isUserFd);
@@ -540,11 +516,7 @@ function close(fd, callback = defaultCloseCallback) {
* @returns {void}
*/
function closeSync(fd) {
- fd = getValidatedFd(fd);
-
- const ctx = {};
- binding.close(fd, undefined, ctx);
- handleErrorFromBinding(ctx);
+ return syncFs.close(fd);
}
/**
@@ -590,16 +562,7 @@ function open(path, flags, mode, callback) {
* @returns {number}
*/
function openSync(path, flags, mode) {
- path = getValidatedPath(path);
- const flagsNumber = stringToFlags(flags);
- mode = parseFileMode(mode, 'mode', 0o666);
-
- const ctx = { path };
- const result = binding.open(pathModule.toNamespacedPath(path),
- flagsNumber, mode,
- undefined, ctx);
- handleErrorFromBinding(ctx);
- return result;
+ return syncFs.open(path, flags, mode);
}
/**
@@ -1702,25 +1665,12 @@ function lstatSync(path, options = { bigint: false, throwIfNoEntry: true }) {
* }} [options]
* @returns {Stats}
*/
-function statSync(path, options = { bigint: false, throwIfNoEntry: true }) {
- path = getValidatedPath(path);
- const ctx = { path };
- const stats = binding.stat(pathModule.toNamespacedPath(path),
- options.bigint, undefined, ctx);
- if (options.throwIfNoEntry === false && hasNoEntryError(ctx)) {
- return undefined;
- }
- handleErrorFromBinding(ctx);
- return getStatsFromBinding(stats);
+function statSync(path, options) {
+ return syncFs.stat(path, options);
}
-function statfsSync(path, options = { bigint: false }) {
- path = getValidatedPath(path);
- const ctx = { path };
- const stats = binding.statfs(pathModule.toNamespacedPath(path),
- options.bigint, undefined, ctx);
- handleErrorFromBinding(ctx);
- return getStatFsFromBinding(stats);
+function statfsSync(path, options) {
+ return syncFs.statfs(path, options);
}
/**
@@ -1900,10 +1850,7 @@ function unlink(path, callback) {
* @returns {void}
*/
function unlinkSync(path) {
- path = getValidatedPath(path);
- const ctx = { path };
- binding.unlink(pathModule.toNamespacedPath(path), undefined, ctx);
- handleErrorFromBinding(ctx);
+ return syncFs.unlink(path);
}
/**
@@ -2999,16 +2946,7 @@ function copyFile(src, dest, mode, callback) {
* @returns {void}
*/
function copyFileSync(src, dest, mode) {
- src = getValidatedPath(src, 'src');
- dest = getValidatedPath(dest, 'dest');
-
- const ctx = { path: src, dest }; // non-prefixed
-
- src = pathModule._makeLong(src);
- dest = pathModule._makeLong(dest);
- mode = getValidMode(mode, 'copyFile');
- binding.copyFile(src, dest, mode, undefined, ctx);
- handleErrorFromBinding(ctx);
+ syncFs.copyFile(src, dest, mode);
}
/**
diff --git a/lib/internal/blob.js b/lib/internal/blob.js
index 167c0521b4573d..d0e47c1a4397a8 100644
--- a/lib/internal/blob.js
+++ b/lib/internal/blob.js
@@ -321,6 +321,7 @@ class Blob {
const reader = this[kHandle].getReader();
return new lazyReadableStream({
+ type: 'bytes',
start(c) {
// There really should only be one read at a time so using an
// array here is purely defensive.
@@ -340,6 +341,9 @@ class Blob {
if (status === 0) {
// EOS
c.close();
+ // This is to signal the end for byob readers
+ // see https://streams.spec.whatwg.org/#example-rbs-pull
+ c.byobRequest?.respond(0);
const pending = this.pendingPulls.shift();
pending.resolve();
return;
@@ -353,13 +357,15 @@ class Blob {
pending.reject(error);
return;
}
- if (buffer !== undefined) {
+ // ReadableByteStreamController.enqueue errors if we submit a 0-length
+ // buffer. We need to check for that here.
+ if (buffer !== undefined && buffer.byteLength !== 0) {
c.enqueue(new Uint8Array(buffer));
}
// We keep reading until we either reach EOS, some error, or we
// hit the flow rate of the stream (c.desiredSize).
queueMicrotask(() => {
- if (c.desiredSize <= 0) {
+ if (c.desiredSize < 0) {
// A manual backpressure check.
if (this.pendingPulls.length !== 0) {
// A case of waiting pull finished (= not yet canceled)
diff --git a/lib/internal/bootstrap/realm.js b/lib/internal/bootstrap/realm.js
index 608e3072850d45..f9d096ca963464 100644
--- a/lib/internal/bootstrap/realm.js
+++ b/lib/internal/bootstrap/realm.js
@@ -174,9 +174,9 @@ const experimentalModuleList = new SafeSet();
};
}
-// Set up internalBinding() in the closure.
/**
- * @type {InternalBinding}
+ * Set up internalBinding() in the closure.
+ * @type {import('typings/globals').internalBinding}
*/
let internalBinding;
{
diff --git a/lib/internal/bootstrap/switches/does_own_process_state.js b/lib/internal/bootstrap/switches/does_own_process_state.js
index 85b5c3dfcb09ed..8f457de3e1183e 100644
--- a/lib/internal/bootstrap/switches/does_own_process_state.js
+++ b/lib/internal/bootstrap/switches/does_own_process_state.js
@@ -2,6 +2,12 @@
const credentials = internalBinding('credentials');
const rawMethods = internalBinding('process_methods');
+const {
+ namespace: {
+ addSerializeCallback,
+ isBuildingSnapshot,
+ },
+} = require('internal/v8/startup_snapshot');
process.abort = rawMethods.abort;
process.umask = wrappedUmask;
@@ -107,6 +113,12 @@ function wrapPosixCredentialSetters(credentials) {
// directory is changed by `chdir`, it'll be updated.
let cachedCwd = '';
+if (isBuildingSnapshot()) {
+ addSerializeCallback(() => {
+ cachedCwd = '';
+ });
+}
+
function wrappedChdir(directory) {
validateString(directory, 'directory');
rawMethods.chdir(directory);
diff --git a/lib/internal/bootstrap/switches/is_main_thread.js b/lib/internal/bootstrap/switches/is_main_thread.js
index f2c3478e8bb5bf..8707bc7daaa616 100644
--- a/lib/internal/bootstrap/switches/is_main_thread.js
+++ b/lib/internal/bootstrap/switches/is_main_thread.js
@@ -290,7 +290,7 @@ rawMethods.resetStdioForTesting = function() {
// Needed by the module loader and generally needed everywhere.
require('fs');
require('util');
-require('url');
+require('url'); // eslint-disable-line no-restricted-modules
require('internal/modules/cjs/loader');
require('internal/modules/esm/utils');
diff --git a/lib/internal/debugger/inspect_client.js b/lib/internal/debugger/inspect_client.js
index e467899fb3e746..315617bf08a800 100644
--- a/lib/internal/debugger/inspect_client.js
+++ b/lib/internal/debugger/inspect_client.js
@@ -15,7 +15,7 @@ const crypto = require('crypto');
const { ERR_DEBUGGER_ERROR } = require('internal/errors').codes;
const { EventEmitter } = require('events');
const http = require('http');
-const URL = require('url');
+const { URL } = require('internal/url');
const debuglog = require('internal/util/debuglog').debuglog('inspect');
@@ -297,7 +297,8 @@ class Client extends EventEmitter {
async _discoverWebsocketPath() {
const { 0: { webSocketDebuggerUrl } } = await this._fetchJSON('/json');
- return URL.parse(webSocketDebuggerUrl).path;
+ const { pathname, search } = new URL(webSocketDebuggerUrl);
+ return `${pathname}${search}`;
}
_connectWebsocket(urlPath) {
diff --git a/lib/internal/encoding.js b/lib/internal/encoding.js
index 996b2506a49d3b..a9bfb665c2f1e8 100644
--- a/lib/internal/encoding.js
+++ b/lib/internal/encoding.js
@@ -76,8 +76,11 @@ const empty = new Uint8Array(0);
const encodings = new SafeMap([
['unicode-1-1-utf-8', 'utf-8'],
+ ['unicode11utf8', 'utf-8'],
+ ['unicode20utf8', 'utf-8'],
['utf8', 'utf-8'],
['utf-8', 'utf-8'],
+ ['x-unicode20utf8', 'utf-8'],
['866', 'ibm866'],
['cp866', 'ibm866'],
['csibm866', 'ibm866'],
@@ -176,6 +179,7 @@ const encodings = new SafeMap([
['iso885915', 'iso-8859-15'],
['iso_8859-15', 'iso-8859-15'],
['l9', 'iso-8859-15'],
+ ['iso-8859-16', 'iso-8859-16'],
['cskoi8r', 'koi8-r'],
['koi', 'koi8-r'],
['koi8', 'koi8-r'],
@@ -283,9 +287,22 @@ const encodings = new SafeMap([
['ksc5601', 'euc-kr'],
['ksc_5601', 'euc-kr'],
['windows-949', 'euc-kr'],
+ ['csiso2022kr', 'replacement'],
+ ['hz-gb-2312', 'replacement'],
+ ['iso-2022-cn', 'replacement'],
+ ['iso-2022-cn-ext', 'replacement'],
+ ['iso-2022-kr', 'replacement'],
+ ['replacement', 'replacement'],
+ ['unicodefffe', 'utf-16be'],
['utf-16be', 'utf-16be'],
+ ['csunicode', 'utf-16le'],
+ ['iso-10646-ucs-2', 'utf-16le'],
+ ['ucs-2', 'utf-16le'],
+ ['unicode', 'utf-16le'],
+ ['unicodefeff', 'utf-16le'],
['utf-16le', 'utf-16le'],
['utf-16', 'utf-16le'],
+ ['x-user-defined', 'x-user-defined'],
]);
// Unfortunately, String.prototype.trim also removes non-ascii whitespace,
diff --git a/lib/internal/errors.js b/lib/internal/errors.js
index bed02da76a3bab..4e332e1ce18d16 100644
--- a/lib/internal/errors.js
+++ b/lib/internal/errors.js
@@ -66,7 +66,8 @@ const isWindows = process.platform === 'win32';
const messages = new SafeMap();
const codes = {};
-const classRegExp = /^([A-Z][a-z0-9]*)+$/;
+const classRegExp = /^[A-Z][a-zA-Z0-9]*$/;
+
// Sorted by a rough estimate on most frequently used entries.
const kTypes = [
'string',
@@ -1349,17 +1350,11 @@ E('ERR_INVALID_REPL_EVAL_CONFIG',
E('ERR_INVALID_REPL_INPUT', '%s', TypeError);
E('ERR_INVALID_RETURN_PROPERTY', (input, name, prop, value) => {
return `Expected a valid ${input} to be returned for the "${prop}" from the` +
- ` "${name}" function but got ${value}.`;
+ ` "${name}" function but got ${determineSpecificType(value)}.`;
}, TypeError);
E('ERR_INVALID_RETURN_PROPERTY_VALUE', (input, name, prop, value) => {
- let type;
- if (value?.constructor?.name) {
- type = `instance of ${value.constructor.name}`;
- } else {
- type = `type ${typeof value}`;
- }
return `Expected ${input} to be returned for the "${prop}" from the` +
- ` "${name}" function but got ${type}.`;
+ ` "${name}" function but got ${determineSpecificType(value)}.`;
}, TypeError);
E('ERR_INVALID_RETURN_VALUE', (input, name, value) => {
const type = determineSpecificType(value);
@@ -1375,8 +1370,13 @@ E('ERR_INVALID_SYNC_FORK_INPUT',
E('ERR_INVALID_THIS', 'Value of "this" must be of type %s', TypeError);
E('ERR_INVALID_TUPLE', '%s must be an iterable %s tuple', TypeError);
E('ERR_INVALID_URI', 'URI malformed', URIError);
-E('ERR_INVALID_URL', function(input) {
+E('ERR_INVALID_URL', function(input, base = null) {
this.input = input;
+
+ if (base != null) {
+ this.base = base;
+ }
+
// Don't include URL in message.
// (See https://github.com/nodejs/node/pull/38614)
return 'Invalid URL';
@@ -1455,7 +1455,7 @@ E('ERR_MISSING_ARGS',
E('ERR_MISSING_OPTION', '%s is required', TypeError);
E('ERR_MODULE_NOT_FOUND', function(path, base, exactUrl) {
if (exactUrl) {
- lazyInternalUtil().setOwnProperty(this, 'url', exactUrl);
+ lazyInternalUtil().setOwnProperty(this, 'url', `${exactUrl}`);
}
return `Cannot find ${
exactUrl ? 'module' : 'package'} '${path}' imported from ${base}`;
@@ -1548,7 +1548,7 @@ E('ERR_REQUIRE_ESM',
msg += `\n${basename} is treated as an ES module file as it is a .js ` +
'file whose nearest parent package.json contains "type": "module" ' +
'which declares all .js files in that package scope as ES modules.' +
- `\nInstead rename ${basename} to end in .cjs, change the requiring ` +
+ `\nInstead either rename ${basename} to end in .cjs, change the requiring ` +
'code to use dynamic import() which is available in all CommonJS ' +
'modules, or change "type": "module" to "type": "commonjs" in ' +
`${packageJsonPath} to treat all .js files as CommonJS (using .mjs for ` +
diff --git a/lib/internal/freeze_intrinsics.js b/lib/internal/freeze_intrinsics.js
index 72ba32589338b0..793c19df1e9138 100644
--- a/lib/internal/freeze_intrinsics.js
+++ b/lib/internal/freeze_intrinsics.js
@@ -203,7 +203,6 @@ module.exports = function() {
// 25 Structured Data
ArrayBufferPrototype, // 25.1
- SharedArrayBuffer.prototype, // 25.2
DataViewPrototype, // 25.3
// 26 Managing Memory
@@ -309,7 +308,6 @@ module.exports = function() {
// 25 Structured Data
ArrayBuffer, // 25.1
- SharedArrayBuffer, // 25.2
DataView, // 25.3
Atomics, // 25.4
// eslint-disable-next-line node-core/prefer-primordials
@@ -354,6 +352,11 @@ module.exports = function() {
WebAssembly,
];
+ if (typeof SharedArrayBuffer !== 'undefined') { // 25.2
+ ArrayPrototypePush(intrinsicPrototypes, SharedArrayBuffer.prototype);
+ ArrayPrototypePush(intrinsics, SharedArrayBuffer);
+ }
+
if (typeof Intl !== 'undefined') {
ArrayPrototypePush(intrinsicPrototypes,
Intl.Collator.prototype,
diff --git a/lib/internal/fs/dir.js b/lib/internal/fs/dir.js
index ec0562843d5f5c..1118ff5f674915 100644
--- a/lib/internal/fs/dir.js
+++ b/lib/internal/fs/dir.js
@@ -152,7 +152,7 @@ class Dir {
ArrayPrototypePush(
this[kDirBufferedEntries],
getDirent(
- pathModule.join(path, result[i]),
+ path,
result[i],
result[i + 1],
),
@@ -161,9 +161,10 @@ class Dir {
}
readSyncRecursive(dirent) {
- const ctx = { path: dirent.path };
+ const path = pathModule.join(dirent.path, dirent.name);
+ const ctx = { path };
const handle = dirBinding.opendir(
- pathModule.toNamespacedPath(dirent.path),
+ pathModule.toNamespacedPath(path),
this[kDirOptions].encoding,
undefined,
ctx,
@@ -177,7 +178,7 @@ class Dir {
);
if (result) {
- this.processReadResult(dirent.path, result);
+ this.processReadResult(path, result);
}
handle.close(undefined, ctx);
@@ -321,18 +322,11 @@ function opendir(path, options, callback) {
function opendirSync(path, options) {
path = getValidatedPath(path);
- options = getOptions(options, {
- encoding: 'utf8',
- });
+ options = getOptions(options, { encoding: 'utf8' });
- const ctx = { path };
- const handle = dirBinding.opendir(
+ const handle = dirBinding.opendirSync(
pathModule.toNamespacedPath(path),
- options.encoding,
- undefined,
- ctx,
);
- handleErrorFromBinding(ctx);
return new Dir(handle, path, options);
}
diff --git a/lib/internal/fs/read/utf8.js b/lib/internal/fs/read/utf8.js
deleted file mode 100644
index 5159db5988ee0b..00000000000000
--- a/lib/internal/fs/read/utf8.js
+++ /dev/null
@@ -1,25 +0,0 @@
-'use strict';
-
-const { handleErrorFromBinding } = require('internal/fs/utils');
-
-const binding = internalBinding('fs');
-
-/**
- * @param {string} path
- * @param {number} flag
- * @return {string}
- */
-function readFileSyncUtf8(path, flag) {
- const response = binding.readFileSync(path, flag);
-
- if (typeof response === 'string') {
- return response;
- }
-
- const { 0: errno, 1: syscall } = response;
- handleErrorFromBinding({ errno, syscall, path });
-}
-
-module.exports = {
- readFileSyncUtf8,
-};
diff --git a/lib/internal/fs/sync.js b/lib/internal/fs/sync.js
new file mode 100644
index 00000000000000..fbcc2ad2e25b2a
--- /dev/null
+++ b/lib/internal/fs/sync.js
@@ -0,0 +1,106 @@
+'use strict';
+
+const pathModule = require('path');
+const {
+ getValidatedPath,
+ stringToFlags,
+ getValidMode,
+ getStatsFromBinding,
+ getStatFsFromBinding,
+ getValidatedFd,
+} = require('internal/fs/utils');
+const { parseFileMode, isInt32 } = require('internal/validators');
+
+const binding = internalBinding('fs');
+
+/**
+ * @param {string} path
+ * @param {number} flag
+ * @return {string}
+ */
+function readFileUtf8(path, flag) {
+ if (!isInt32(path)) {
+ path = pathModule.toNamespacedPath(getValidatedPath(path));
+ }
+ return binding.readFileUtf8(path, stringToFlags(flag));
+}
+
+function exists(path) {
+ try {
+ path = getValidatedPath(path);
+ } catch {
+ return false;
+ }
+
+ return binding.existsSync(pathModule.toNamespacedPath(path));
+}
+
+function access(path, mode) {
+ path = getValidatedPath(path);
+ mode = getValidMode(mode, 'access');
+
+ binding.accessSync(pathModule.toNamespacedPath(path), mode);
+}
+
+function copyFile(src, dest, mode) {
+ src = getValidatedPath(src, 'src');
+ dest = getValidatedPath(dest, 'dest');
+
+ binding.copyFileSync(
+ pathModule.toNamespacedPath(src),
+ pathModule.toNamespacedPath(dest),
+ getValidMode(mode, 'copyFile'),
+ );
+}
+
+function stat(path, options = { bigint: false, throwIfNoEntry: true }) {
+ path = getValidatedPath(path);
+ const stats = binding.statSync(
+ pathModule.toNamespacedPath(path),
+ options.bigint,
+ options.throwIfNoEntry,
+ );
+ if (stats === undefined) {
+ return undefined;
+ }
+ return getStatsFromBinding(stats);
+}
+
+function statfs(path, options = { bigint: false }) {
+ path = getValidatedPath(path);
+ const stats = binding.statfsSync(pathModule.toNamespacedPath(path), options.bigint);
+ return getStatFsFromBinding(stats);
+}
+
+function open(path, flags, mode) {
+ path = getValidatedPath(path);
+
+ return binding.openSync(
+ pathModule.toNamespacedPath(path),
+ stringToFlags(flags),
+ parseFileMode(mode, 'mode', 0o666),
+ );
+}
+
+function close(fd) {
+ fd = getValidatedFd(fd);
+
+ return binding.closeSync(fd);
+}
+
+function unlink(path) {
+ path = pathModule.toNamespacedPath(getValidatedPath(path));
+ return binding.unlinkSync(path);
+}
+
+module.exports = {
+ readFileUtf8,
+ exists,
+ access,
+ copyFile,
+ stat,
+ statfs,
+ open,
+ close,
+ unlink,
+};
diff --git a/lib/internal/fs/utils.js b/lib/internal/fs/utils.js
index 6e6c7ee58cf5d1..2fc7bf61e9c488 100644
--- a/lib/internal/fs/utils.js
+++ b/lib/internal/fs/utils.js
@@ -234,7 +234,7 @@ function join(path, name) {
}
if (typeof path === 'string' && typeof name === 'string') {
- return pathModule.basename(path) === name ? path : pathModule.join(path, name);
+ return pathModule.join(path, name);
}
if (isUint8Array(path) && isUint8Array(name)) {
diff --git a/lib/internal/main/mksnapshot.js b/lib/internal/main/mksnapshot.js
index 52d859d491a93f..34701716839326 100644
--- a/lib/internal/main/mksnapshot.js
+++ b/lib/internal/main/mksnapshot.js
@@ -132,7 +132,7 @@ function requireForUserSnapshot(id) {
function main() {
- prepareMainThreadExecution(true, false);
+ prepareMainThreadExecution(false, false);
initializeCallbacks();
let stackTraceLimitDesc;
diff --git a/lib/internal/modules/esm/hooks.js b/lib/internal/modules/esm/hooks.js
index b7e1afac31060d..05885050b82fc9 100644
--- a/lib/internal/modules/esm/hooks.js
+++ b/lib/internal/modules/esm/hooks.js
@@ -73,6 +73,7 @@ let importMetaInitializer;
/**
* @typedef {object} ExportedHooks
+ * @property {Function} initialize Customizations setup hook.
* @property {Function} globalPreload Global preload hook.
* @property {Function} resolve Resolve hook.
* @property {Function} load Load hook.
@@ -140,7 +141,7 @@ class Hooks {
parentURL,
kEmptyObject,
);
- return this.addCustomLoader(urlOrSpecifier, keyedExports, data);
+ await this.addCustomLoader(urlOrSpecifier, keyedExports, data);
}
/**
@@ -150,7 +151,7 @@ class Hooks {
* @param {Record} exports
* @param {any} [data] Arbitrary data to be passed from the custom loader (user-land)
* to the worker.
- * @returns {any} The result of the loader's `initialize` hook, if provided.
+ * @returns {any | Promise} User data, ignored unless it's a promise, in which case it will be awaited.
*/
addCustomLoader(url, exports, data) {
const {
diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js
index 5305c1eb8f37d9..fb76e90c497faa 100644
--- a/lib/internal/modules/esm/loader.js
+++ b/lib/internal/modules/esm/loader.js
@@ -4,9 +4,16 @@
require('internal/modules/cjs/loader');
const {
+ ArrayPrototypeJoin,
+ ArrayPrototypeMap,
+ ArrayPrototypeReduce,
FunctionPrototypeCall,
+ JSONStringify,
ObjectSetPrototypeOf,
+ RegExpPrototypeSymbolReplace,
SafeWeakMap,
+ encodeURIComponent,
+ hardenRegExp,
} = primordials;
const {
@@ -14,7 +21,7 @@ const {
ERR_UNKNOWN_MODULE_FORMAT,
} = require('internal/errors').codes;
const { getOptionValue } = require('internal/options');
-const { pathToFileURL } = require('internal/url');
+const { pathToFileURL, isURL } = require('internal/url');
const { emitExperimentalWarning } = require('internal/util');
const {
getDefaultConditions,
@@ -320,7 +327,7 @@ class ModuleLoader {
// eslint-disable-next-line no-use-before-define
this.setCustomizations(new CustomizedModuleLoader());
}
- return this.#customizations.register(specifier, parentURL, data, transferList);
+ return this.#customizations.register(`${specifier}`, `${parentURL}`, data, transferList);
}
/**
@@ -498,7 +505,7 @@ class CustomizedModuleLoader {
}
}
-let emittedExperimentalWarning = false;
+let emittedLoaderFlagWarning = false;
/**
* A loader instance is used as the main entry point for loading ES modules. Currently, this is a singleton; there is
* only one used for loading the main module and everything in its dependency graph, though separate instances of this
@@ -514,9 +521,24 @@ function createModuleLoader(useCustomLoadersIfPresent = true) {
!require('internal/modules/esm/utils').isLoaderWorker()) {
const userLoaderPaths = getOptionValue('--experimental-loader');
if (userLoaderPaths.length > 0) {
- if (!emittedExperimentalWarning) {
- emitExperimentalWarning('Custom ESM Loaders');
- emittedExperimentalWarning = true;
+ if (!emittedLoaderFlagWarning) {
+ const readableURIEncode = (string) => ArrayPrototypeReduce(
+ [
+ [/'/g, '%27'], // We need to URL-encode the single quote as it's the delimiter for the --import flag.
+ [/%22/g, '"'], // We can decode the double quotes to improve readability.
+ [/%2F/ig, '/'], // We can decode the slashes to improve readability.
+ ],
+ (str, { 0: regex, 1: replacement }) => RegExpPrototypeSymbolReplace(hardenRegExp(regex), str, replacement),
+ encodeURIComponent(string));
+ process.emitWarning(
+ '`--experimental-loader` may be removed in the future; instead use `register()`:\n' +
+ `--import 'data:text/javascript,import { register } from "node:module"; import { pathToFileURL } from "node:url"; ${ArrayPrototypeJoin(
+ ArrayPrototypeMap(userLoaderPaths, (loader) => `register(${readableURIEncode(JSONStringify(loader))}, pathToFileURL("./"))`),
+ '; ',
+ )};'`,
+ 'ExperimentalWarning',
+ );
+ emittedLoaderFlagWarning = true;
}
customizations = new CustomizedModuleLoader();
}
@@ -541,14 +563,14 @@ function getHooksProxy() {
/**
* Register a single loader programmatically.
- * @param {string} specifier
- * @param {string} [parentURL] Base to use when resolving `specifier`; optional if
+ * @param {string|import('url').URL} specifier
+ * @param {string|import('url').URL} [parentURL] Base to use when resolving `specifier`; optional if
* `specifier` is absolute. Same as `options.parentUrl`, just inline
* @param {object} [options] Additional options to apply, described below.
- * @param {string} [options.parentURL] Base to use when resolving `specifier`
+ * @param {string|import('url').URL} [options.parentURL] Base to use when resolving `specifier`
* @param {any} [options.data] Arbitrary data passed to the loader's `initialize` hook
* @param {any[]} [options.transferList] Objects in `data` that are changing ownership
- * @returns {any} The result of the loader's initialize hook, if any
+ * @returns {void} We want to reserve the return value for potential future extension of the API.
* @example
* ```js
* register('./myLoader.js');
@@ -570,12 +592,12 @@ function getHooksProxy() {
*/
function register(specifier, parentURL = undefined, options) {
const moduleLoader = require('internal/process/esm_loader').esmLoader;
- if (parentURL != null && typeof parentURL === 'object') {
+ if (parentURL != null && typeof parentURL === 'object' && !isURL(parentURL)) {
options = parentURL;
parentURL = options.parentURL;
}
- return moduleLoader.register(
- `${specifier}`,
+ moduleLoader.register(
+ specifier,
parentURL ?? 'data:',
options?.data,
options?.transferList,
diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js
index 6d1793c06a4270..acb5ddca8af3d2 100644
--- a/lib/internal/modules/esm/resolve.js
+++ b/lib/internal/modules/esm/resolve.js
@@ -37,6 +37,7 @@ const experimentalNetworkImports =
getOptionValue('--experimental-network-imports');
const typeFlag = getOptionValue('--input-type');
const { URL, pathToFileURL, fileURLToPath, isURL } = require('internal/url');
+const { getCWDURL } = require('internal/util');
const { canParse: URLCanParse } = internalBinding('url');
const { legacyMainResolve: FSLegacyMainResolve } = internalBinding('fs');
const {
@@ -208,7 +209,15 @@ function finalizeResolution(resolved, base, preserveSymlinks) {
resolved.pathname, 'must not include encoded "/" or "\\" characters',
fileURLToPath(base));
- const path = fileURLToPath(resolved);
+ let path;
+ try {
+ path = fileURLToPath(resolved);
+ } catch (err) {
+ const { setOwnProperty } = require('internal/util');
+ setOwnProperty(err, 'input', `${resolved}`);
+ setOwnProperty(err, 'module', `${base}`);
+ throw err;
+ }
const stats = internalModuleStat(toNamespacedPath(StringPrototypeEndsWith(path, '/') ?
StringPrototypeSlice(path, -1) : path));
@@ -1017,7 +1026,7 @@ function defaultResolve(specifier, context = {}) {
const isMain = parentURL === undefined;
if (isMain) {
- parentURL = pathToFileURL(`${process.cwd()}/`).href;
+ parentURL = getCWDURL().href;
// This is the initial entry point to the program, and --input-type has
// been passed as an option; but --input-type can only be used with
diff --git a/lib/internal/modules/esm/utils.js b/lib/internal/modules/esm/utils.js
index 5014c99b2a9eb3..64bc21a47c7845 100644
--- a/lib/internal/modules/esm/utils.js
+++ b/lib/internal/modules/esm/utils.js
@@ -16,7 +16,7 @@ const {
loadPreloadModules,
initializeFrozenIntrinsics,
} = require('internal/process/pre_execution');
-const { pathToFileURL } = require('internal/url');
+const { getCWDURL } = require('internal/util');
const {
setImportModuleDynamicallyCallback,
setInitializeImportMetaObjectCallback,
@@ -112,15 +112,6 @@ function isLoaderWorker() {
async function initializeHooks() {
const customLoaderURLs = getOptionValue('--experimental-loader');
- let cwd;
- try {
- // `process.cwd()` can fail if the parent directory is deleted while the process runs.
- cwd = process.cwd() + '/';
- } catch {
- cwd = '/';
- }
-
-
const { Hooks } = require('internal/modules/esm/hooks');
const esmLoader = require('internal/process/esm_loader').esmLoader;
@@ -137,7 +128,7 @@ async function initializeHooks() {
loadPreloadModules();
initializeFrozenIntrinsics();
- const parentURL = pathToFileURL(cwd).href;
+ const parentURL = getCWDURL().href;
for (let i = 0; i < customLoaderURLs.length; i++) {
await hooks.register(
customLoaderURLs[i],
diff --git a/lib/internal/perf/performance_entry.js b/lib/internal/perf/performance_entry.js
index 036bfc173bd024..aa97a652626606 100644
--- a/lib/internal/perf/performance_entry.js
+++ b/lib/internal/perf/performance_entry.js
@@ -2,7 +2,6 @@
const {
ObjectDefineProperties,
- ReflectConstruct,
Symbol,
} = primordials;
@@ -25,14 +24,17 @@ const kEntryType = Symbol('PerformanceEntry.EntryType');
const kStartTime = Symbol('PerformanceEntry.StartTime');
const kDuration = Symbol('PerformanceEntry.Duration');
const kDetail = Symbol('NodePerformanceEntry.Detail');
+const kSkipThrow = Symbol('kSkipThrow');
function isPerformanceEntry(obj) {
return obj?.[kName] !== undefined;
}
class PerformanceEntry {
- constructor() {
- throw new ERR_ILLEGAL_CONSTRUCTOR();
+ constructor(skipThrowSymbol = undefined) {
+ if (skipThrowSymbol !== kSkipThrow) {
+ throw new ERR_ILLEGAL_CONSTRUCTOR();
+ }
}
get name() {
@@ -92,9 +94,11 @@ function initPerformanceEntry(entry, name, type, start, duration) {
}
function createPerformanceEntry(name, type, start, duration) {
- return ReflectConstruct(function PerformanceEntry() {
- initPerformanceEntry(this, name, type, start, duration);
- }, [], PerformanceEntry);
+ const entry = new PerformanceEntry(kSkipThrow);
+
+ initPerformanceEntry(entry, name, type, start, duration);
+
+ return entry;
}
/**
@@ -119,10 +123,12 @@ class PerformanceNodeEntry extends PerformanceEntry {
}
function createPerformanceNodeEntry(name, type, start, duration, detail) {
- return ReflectConstruct(function PerformanceNodeEntry() {
- initPerformanceEntry(this, name, type, start, duration);
- this[kDetail] = detail;
- }, [], PerformanceNodeEntry);
+ const entry = new PerformanceNodeEntry(kSkipThrow);
+
+ initPerformanceEntry(entry, name, type, start, duration);
+ entry[kDetail] = detail;
+
+ return entry;
}
module.exports = {
diff --git a/lib/internal/process/esm_loader.js b/lib/internal/process/esm_loader.js
index e735101ab18c47..a3451ddab307f2 100644
--- a/lib/internal/process/esm_loader.js
+++ b/lib/internal/process/esm_loader.js
@@ -9,8 +9,7 @@ const { getOptionValue } = require('internal/options');
const {
hasUncaughtExceptionCaptureCallback,
} = require('internal/process/execution');
-const { pathToFileURL } = require('internal/url');
-const { kEmptyObject } = require('internal/util');
+const { kEmptyObject, getCWDURL } = require('internal/util');
let esmLoader;
@@ -23,14 +22,7 @@ module.exports = {
try {
const userImports = getOptionValue('--import');
if (userImports.length > 0) {
- let cwd;
- try {
- // `process.cwd()` can fail if the parent directory is deleted while the process runs.
- cwd = process.cwd() + '/';
- } catch {
- cwd = '/';
- }
- const parentURL = pathToFileURL(cwd).href;
+ const parentURL = getCWDURL().href;
await SafePromiseAllReturnVoid(userImports, (specifier) => esmLoader.import(
specifier,
parentURL,
diff --git a/lib/internal/process/execution.js b/lib/internal/process/execution.js
index afe2ba2c2c977b..4b77aa47c2cb35 100644
--- a/lib/internal/process/execution.js
+++ b/lib/internal/process/execution.js
@@ -54,7 +54,7 @@ function evalModule(source, print) {
function evalScript(name, body, breakFirstLine, print, shouldLoadESM = false) {
const CJSModule = require('internal/modules/cjs/loader').Module;
const { kVmBreakFirstLineSymbol } = require('internal/util');
- const { pathToFileURL } = require('url');
+ const { pathToFileURL } = require('internal/url');
const cwd = tryGetCwd();
const origModule = globalThis.module; // Set e.g. when called from the REPL.
diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js
index 0627b06be89c2b..1f4a08515b5ae9 100644
--- a/lib/internal/process/pre_execution.js
+++ b/lib/internal/process/pre_execution.js
@@ -2,15 +2,23 @@
const {
ArrayPrototypeForEach,
+ Date,
+ DatePrototypeGetDate,
+ DatePrototypeGetFullYear,
+ DatePrototypeGetHours,
+ DatePrototypeGetMinutes,
+ DatePrototypeGetMonth,
+ DatePrototypeGetSeconds,
NumberParseInt,
ObjectDefineProperties,
ObjectDefineProperty,
ObjectGetOwnPropertyDescriptor,
SafeMap,
+ String,
StringPrototypeStartsWith,
Symbol,
- SymbolDispose,
SymbolAsyncDispose,
+ SymbolDispose,
globalThis,
} = primordials;
@@ -400,6 +408,7 @@ function initializeReportSignalHandlers() {
function initializeHeapSnapshotSignalHandlers() {
const signal = getOptionValue('--heapsnapshot-signal');
+ const diagnosticDir = getOptionValue('--diagnostic-dir');
if (!signal)
return;
@@ -408,7 +417,8 @@ function initializeHeapSnapshotSignalHandlers() {
const { writeHeapSnapshot } = require('v8');
function doWriteHeapSnapshot() {
- writeHeapSnapshot();
+ const heapSnapshotFilename = getHeapSnapshotFilename(diagnosticDir);
+ writeHeapSnapshot(heapSnapshotFilename);
}
process.on(signal, doWriteHeapSnapshot);
@@ -700,6 +710,31 @@ function markBootstrapComplete() {
internalBinding('performance').markBootstrapComplete();
}
+// Sequence number for diagnostic filenames
+let sequenceNumOfheapSnapshot = 0;
+
+// To generate the HeapSnapshotFilename while using custom diagnosticDir
+function getHeapSnapshotFilename(diagnosticDir) {
+ if (!diagnosticDir) return undefined;
+
+ const date = new Date();
+
+ const year = DatePrototypeGetFullYear(date);
+ const month = String(DatePrototypeGetMonth(date) + 1).padStart(2, '0');
+ const day = String(DatePrototypeGetDate(date)).padStart(2, '0');
+ const hours = String(DatePrototypeGetHours(date)).padStart(2, '0');
+ const minutes = String(DatePrototypeGetMinutes(date)).padStart(2, '0');
+ const seconds = String(DatePrototypeGetSeconds(date)).padStart(2, '0');
+
+ const dateString = `${year}${month}${day}`;
+ const timeString = `${hours}${minutes}${seconds}`;
+ const pid = process.pid;
+ const threadId = internalBinding('worker').threadId;
+ const fileSequence = (++sequenceNumOfheapSnapshot).toString().padStart(3, '0');
+
+ return `${diagnosticDir}/Heap.${dateString}.${timeString}.${pid}.${threadId}.${fileSequence}.heapsnapshot`;
+}
+
module.exports = {
setupUserModules,
prepareMainThreadExecution,
diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js
index 1b40192d9458ba..49df23cba9f4c2 100644
--- a/lib/internal/streams/readable.js
+++ b/lib/internal/streams/readable.js
@@ -83,6 +83,75 @@ const nop = () => {};
const { errorOrDestroy } = destroyImpl;
+const kObjectMode = 1 << 0;
+const kEnded = 1 << 1;
+const kEndEmitted = 1 << 2;
+const kReading = 1 << 3;
+const kConstructed = 1 << 4;
+const kSync = 1 << 5;
+const kNeedReadable = 1 << 6;
+const kEmittedReadable = 1 << 7;
+const kReadableListening = 1 << 8;
+const kResumeScheduled = 1 << 9;
+const kErrorEmitted = 1 << 10;
+const kEmitClose = 1 << 11;
+const kAutoDestroy = 1 << 12;
+const kDestroyed = 1 << 13;
+const kClosed = 1 << 14;
+const kCloseEmitted = 1 << 15;
+const kMultiAwaitDrain = 1 << 16;
+const kReadingMore = 1 << 17;
+const kDataEmitted = 1 << 18;
+
+// TODO(benjamingr) it is likely slower to do it this way than with free functions
+function makeBitMapDescriptor(bit) {
+ return {
+ enumerable: false,
+ get() { return (this.state & bit) !== 0; },
+ set(value) {
+ if (value) this.state |= bit;
+ else this.state &= ~bit;
+ },
+ };
+}
+ObjectDefineProperties(ReadableState.prototype, {
+ objectMode: makeBitMapDescriptor(kObjectMode),
+ ended: makeBitMapDescriptor(kEnded),
+ endEmitted: makeBitMapDescriptor(kEndEmitted),
+ reading: makeBitMapDescriptor(kReading),
+ // Stream is still being constructed and cannot be
+ // destroyed until construction finished or failed.
+ // Async construction is opt in, therefore we start as
+ // constructed.
+ constructed: makeBitMapDescriptor(kConstructed),
+ // A flag to be able to tell if the event 'readable'/'data' is emitted
+ // immediately, or on a later tick. We set this to true at first, because
+ // any actions that shouldn't happen until "later" should generally also
+ // not happen before the first read call.
+ sync: makeBitMapDescriptor(kSync),
+ // Whenever we return null, then we set a flag to say
+ // that we're awaiting a 'readable' event emission.
+ needReadable: makeBitMapDescriptor(kNeedReadable),
+ emittedReadable: makeBitMapDescriptor(kEmittedReadable),
+ readableListening: makeBitMapDescriptor(kReadableListening),
+ resumeScheduled: makeBitMapDescriptor(kResumeScheduled),
+ // True if the error was already emitted and should not be thrown again.
+ errorEmitted: makeBitMapDescriptor(kErrorEmitted),
+ emitClose: makeBitMapDescriptor(kEmitClose),
+ autoDestroy: makeBitMapDescriptor(kAutoDestroy),
+ // Has it been destroyed.
+ destroyed: makeBitMapDescriptor(kDestroyed),
+ // Indicates whether the stream has finished destroying.
+ closed: makeBitMapDescriptor(kClosed),
+ // True if close has been emitted or would have been emitted
+ // depending on emitClose.
+ closeEmitted: makeBitMapDescriptor(kCloseEmitted),
+ multiAwaitDrain: makeBitMapDescriptor(kMultiAwaitDrain),
+ // If true, a maybeReadMore has been scheduled.
+ readingMore: makeBitMapDescriptor(kReadingMore),
+ dataEmitted: makeBitMapDescriptor(kDataEmitted),
+});
+
function ReadableState(options, stream, isDuplex) {
// Duplex streams are both readable and writable, but share
// the same options object.
@@ -92,13 +161,15 @@ function ReadableState(options, stream, isDuplex) {
if (typeof isDuplex !== 'boolean')
isDuplex = stream instanceof Stream.Duplex;
+ // Bit map field to store ReadableState more effciently with 1 bit per field
+ // instead of a V8 slot per field.
+ this.state = kEmitClose | kAutoDestroy | kConstructed | kSync;
// Object stream flag. Used to make read(n) ignore n and to
// make all the buffer merging and length checks go away.
- this.objectMode = !!(options && options.objectMode);
+ if (options && options.objectMode) this.state |= kObjectMode;
- if (isDuplex)
- this.objectMode = this.objectMode ||
- !!(options && options.readableObjectMode);
+ if (isDuplex && options && options.readableObjectMode)
+ this.state |= kObjectMode;
// The point at which it stops calling _read() to fill the buffer
// Note: 0 is a valid value, means "don't call _read preemptively ever"
@@ -113,41 +184,15 @@ function ReadableState(options, stream, isDuplex) {
this.length = 0;
this.pipes = [];
this.flowing = null;
- this.ended = false;
- this.endEmitted = false;
- this.reading = false;
-
- // Stream is still being constructed and cannot be
- // destroyed until construction finished or failed.
- // Async construction is opt in, therefore we start as
- // constructed.
- this.constructed = true;
- // A flag to be able to tell if the event 'readable'/'data' is emitted
- // immediately, or on a later tick. We set this to true at first, because
- // any actions that shouldn't happen until "later" should generally also
- // not happen before the first read call.
- this.sync = true;
-
- // Whenever we return null, then we set a flag to say
- // that we're awaiting a 'readable' event emission.
- this.needReadable = false;
- this.emittedReadable = false;
- this.readableListening = false;
- this.resumeScheduled = false;
this[kPaused] = null;
- // True if the error was already emitted and should not be thrown again.
- this.errorEmitted = false;
-
// Should close be emitted on destroy. Defaults to true.
- this.emitClose = !options || options.emitClose !== false;
+ if (options && options.emitClose === false) this.state &= ~kEmitClose;
// Should .destroy() be called after 'end' (and potentially 'finish').
- this.autoDestroy = !options || options.autoDestroy !== false;
+ if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy;
- // Has it been destroyed.
- this.destroyed = false;
// Indicates whether the stream has errored. When true no further
// _read calls, 'data' or 'readable' events should occur. This is needed
@@ -155,12 +200,6 @@ function ReadableState(options, stream, isDuplex) {
// stream has failed.
this.errored = null;
- // Indicates whether the stream has finished destroying.
- this.closed = false;
-
- // True if close has been emitted or would have been emitted
- // depending on emitClose.
- this.closeEmitted = false;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
@@ -177,12 +216,6 @@ function ReadableState(options, stream, isDuplex) {
// Ref the piped dest which we need a drain event on it
// type: null | Writable | Set.
this.awaitDrainWriters = null;
- this.multiAwaitDrain = false;
-
- // If true, a maybeReadMore has been scheduled.
- this.readingMore = false;
-
- this.dataEmitted = false;
this.decoder = null;
this.encoding = null;
@@ -263,7 +296,7 @@ function readableAddChunk(stream, chunk, encoding, addToFront) {
const state = stream._readableState;
let err;
- if (!state.objectMode) {
+ if ((state.state & kObjectMode) === 0) {
if (typeof chunk === 'string') {
encoding = encoding || state.defaultEncoding;
if (state.encoding !== encoding) {
@@ -290,11 +323,11 @@ function readableAddChunk(stream, chunk, encoding, addToFront) {
if (err) {
errorOrDestroy(stream, err);
} else if (chunk === null) {
- state.reading = false;
+ state.state &= ~kReading;
onEofChunk(stream, state);
- } else if (state.objectMode || (chunk && chunk.length > 0)) {
+ } else if (((state.state & kObjectMode) !== 0) || (chunk && chunk.length > 0)) {
if (addToFront) {
- if (state.endEmitted)
+ if ((state.state & kEndEmitted) !== 0)
errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());
else if (state.destroyed || state.errored)
return false;
@@ -305,7 +338,7 @@ function readableAddChunk(stream, chunk, encoding, addToFront) {
} else if (state.destroyed || state.errored) {
return false;
} else {
- state.reading = false;
+ state.state &= ~kReading;
if (state.decoder && !encoding) {
chunk = state.decoder.write(chunk);
if (state.objectMode || chunk.length !== 0)
@@ -317,7 +350,7 @@ function readableAddChunk(stream, chunk, encoding, addToFront) {
}
}
} else if (!addToFront) {
- state.reading = false;
+ state.state &= ~kReading;
maybeReadMore(stream, state);
}
@@ -333,7 +366,7 @@ function addChunk(stream, state, chunk, addToFront) {
stream.listenerCount('data') > 0) {
// Use the guard to avoid creating `Set()` repeatedly
// when we have multiple pipes.
- if (state.multiAwaitDrain) {
+ if ((state.state & kMultiAwaitDrain) !== 0) {
state.awaitDrainWriters.clear();
} else {
state.awaitDrainWriters = null;
@@ -349,7 +382,7 @@ function addChunk(stream, state, chunk, addToFront) {
else
state.buffer.push(chunk);
- if (state.needReadable)
+ if ((state.state & kNeedReadable) !== 0)
emitReadable(stream);
}
maybeReadMore(stream, state);
@@ -404,7 +437,7 @@ function computeNewHighWaterMark(n) {
function howMuchToRead(n, state) {
if (n <= 0 || (state.length === 0 && state.ended))
return 0;
- if (state.objectMode)
+ if ((state.state & kObjectMode) !== 0)
return 1;
if (NumberIsNaN(n)) {
// Only flow one buffer at a time.
@@ -435,7 +468,7 @@ Readable.prototype.read = function(n) {
state.highWaterMark = computeNewHighWaterMark(n);
if (n !== 0)
- state.emittedReadable = false;
+ state.state &= ~kEmittedReadable;
// If we're doing read(0) to trigger a readable event, but we
// already have a bunch of data in the buffer, then just trigger
@@ -486,7 +519,7 @@ Readable.prototype.read = function(n) {
// 3. Actually pull the requested chunks out of the buffer and return.
// if we need a readable event, then we need to do some reading.
- let doRead = state.needReadable;
+ let doRead = (state.state & kNeedReadable) !== 0;
debug('need readable', doRead);
// If we currently have less than the highWaterMark, then also read some.
@@ -504,11 +537,10 @@ Readable.prototype.read = function(n) {
debug('reading, ended or constructing', doRead);
} else if (doRead) {
debug('do read');
- state.reading = true;
- state.sync = true;
+ state.state |= kReading | kSync;
// If the length is currently zero, then we *need* a readable event.
if (state.length === 0)
- state.needReadable = true;
+ state.state |= kNeedReadable;
// Call internal read method
try {
@@ -516,8 +548,8 @@ Readable.prototype.read = function(n) {
} catch (err) {
errorOrDestroy(this, err);
}
+ state.state &= ~kSync;
- state.sync = false;
// If _read pushed data synchronously, then `reading` will be false,
// and we need to re-evaluate how much data we can return to the user.
if (!state.reading)
diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js
index 718d4e48478677..595aadc23c8bec 100644
--- a/lib/internal/streams/writable.js
+++ b/lib/internal/streams/writable.js
@@ -74,6 +74,110 @@ function nop() {}
const kOnFinished = Symbol('kOnFinished');
+const kObjectMode = 1 << 0;
+const kEnded = 1 << 1;
+const kConstructed = 1 << 2;
+const kSync = 1 << 3;
+const kErrorEmitted = 1 << 4;
+const kEmitClose = 1 << 5;
+const kAutoDestroy = 1 << 6;
+const kDestroyed = 1 << 7;
+const kClosed = 1 << 8;
+const kCloseEmitted = 1 << 9;
+const kFinalCalled = 1 << 10;
+const kNeedDrain = 1 << 11;
+const kEnding = 1 << 12;
+const kFinished = 1 << 13;
+const kDecodeStrings = 1 << 14;
+const kWriting = 1 << 15;
+const kBufferProcessing = 1 << 16;
+const kPrefinished = 1 << 17;
+const kAllBuffers = 1 << 18;
+const kAllNoop = 1 << 19;
+
+// TODO(benjamingr) it is likely slower to do it this way than with free functions
+function makeBitMapDescriptor(bit) {
+ return {
+ enumerable: false,
+ get() { return (this.state & bit) !== 0; },
+ set(value) {
+ if (value) this.state |= bit;
+ else this.state &= ~bit;
+ },
+ };
+}
+ObjectDefineProperties(WritableState.prototype, {
+ // Object stream flag to indicate whether or not this stream
+ // contains buffers or objects.
+ objectMode: makeBitMapDescriptor(kObjectMode),
+
+ // if _final has been called.
+ finalCalled: makeBitMapDescriptor(kFinalCalled),
+
+ // drain event flag.
+ needDrain: makeBitMapDescriptor(kNeedDrain),
+
+ // At the start of calling end()
+ ending: makeBitMapDescriptor(kEnding),
+
+ // When end() has been called, and returned.
+ ended: makeBitMapDescriptor(kEnded),
+
+ // When 'finish' is emitted.
+ finished: makeBitMapDescriptor(kFinished),
+
+ // Has it been destroyed.
+ destroyed: makeBitMapDescriptor(kDestroyed),
+
+ // Should we decode strings into buffers before passing to _write?
+ // this is here so that some node-core streams can optimize string
+ // handling at a lower level.
+ decodeStrings: makeBitMapDescriptor(kDecodeStrings),
+
+ // A flag to see when we're in the middle of a write.
+ writing: makeBitMapDescriptor(kWriting),
+
+ // A flag to be able to tell if the onwrite cb is called immediately,
+ // or on a later tick. We set this to true at first, because any
+ // actions that shouldn't happen until "later" should generally also
+ // not happen before the first write call.
+ sync: makeBitMapDescriptor(kSync),
+
+ // A flag to know if we're processing previously buffered items, which
+ // may call the _write() callback in the same tick, so that we don't
+ // end up in an overlapped onwrite situation.
+ bufferProcessing: makeBitMapDescriptor(kBufferProcessing),
+
+ // Stream is still being constructed and cannot be
+ // destroyed until construction finished or failed.
+ // Async construction is opt in, therefore we start as
+ // constructed.
+ constructed: makeBitMapDescriptor(kConstructed),
+
+ // Emit prefinish if the only thing we're waiting for is _write cbs
+ // This is relevant for synchronous Transform streams.
+ prefinished: makeBitMapDescriptor(kPrefinished),
+
+ // True if the error was already emitted and should not be thrown again.
+ errorEmitted: makeBitMapDescriptor(kErrorEmitted),
+
+ // Should close be emitted on destroy. Defaults to true.
+ emitClose: makeBitMapDescriptor(kEmitClose),
+
+ // Should .destroy() be called after 'finish' (and potentially 'end').
+ autoDestroy: makeBitMapDescriptor(kAutoDestroy),
+
+ // Indicates whether the stream has finished destroying.
+ closed: makeBitMapDescriptor(kClosed),
+
+ // True if close has been emitted or would have been emitted
+ // depending on emitClose.
+ closeEmitted: makeBitMapDescriptor(kCloseEmitted),
+
+ allBuffers: makeBitMapDescriptor(kAllBuffers),
+ allNoop: makeBitMapDescriptor(kAllNoop),
+});
+
function WritableState(options, stream, isDuplex) {
// Duplex streams are both readable and writable, but share
// the same options object.
@@ -83,13 +187,12 @@ function WritableState(options, stream, isDuplex) {
if (typeof isDuplex !== 'boolean')
isDuplex = stream instanceof Stream.Duplex;
- // Object stream flag to indicate whether or not this stream
- // contains buffers or objects.
- this.objectMode = !!(options && options.objectMode);
+ // Bit map field to store WritableState more efficiently with 1 bit per field
+ // instead of a V8 slot per field.
+ this.state = kSync | kConstructed | kEmitClose | kAutoDestroy;
- if (isDuplex)
- this.objectMode = this.objectMode ||
- !!(options && options.writableObjectMode);
+ if (options && options.objectMode) this.state |= kObjectMode;
+ if (isDuplex && options && options.writableObjectMode) this.state |= kObjectMode;
// The point at which write() starts returning false
// Note: 0 is a valid value, means that we always return false if
@@ -98,26 +201,13 @@ function WritableState(options, stream, isDuplex) {
getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex) :
getDefaultHighWaterMark(false);
- // if _final has been called.
- this.finalCalled = false;
+ if (!options || options.decodeStrings !== false) this.state |= kDecodeStrings;
- // drain event flag.
- this.needDrain = false;
- // At the start of calling end()
- this.ending = false;
- // When end() has been called, and returned.
- this.ended = false;
- // When 'finish' is emitted.
- this.finished = false;
+ // Should close be emitted on destroy. Defaults to true.
+ if (options && options.emitClose === false) this.state &= ~kEmitClose;
- // Has it been destroyed
- this.destroyed = false;
-
- // Should we decode strings into buffers before passing to _write?
- // this is here so that some node-core streams can optimize string
- // handling at a lower level.
- const noDecode = !!(options && options.decodeStrings === false);
- this.decodeStrings = !noDecode;
+ // Should .destroy() be called after 'end' (and potentially 'finish').
+ if (options && options.autoDestroy === false) this.state &= ~kAutoDestroy;
// Crypto is kind of old and crusty. Historically, its default string
// encoding is 'binary' so we have to make this configurable.
@@ -136,23 +226,9 @@ function WritableState(options, stream, isDuplex) {
// socket or file.
this.length = 0;
- // A flag to see when we're in the middle of a write.
- this.writing = false;
-
// When true all writes will be buffered until .uncork() call.
this.corked = 0;
- // A flag to be able to tell if the onwrite cb is called immediately,
- // or on a later tick. We set this to true at first, because any
- // actions that shouldn't happen until "later" should generally also
- // not happen before the first write call.
- this.sync = true;
-
- // A flag to know if we're processing previously buffered items, which
- // may call the _write() callback in the same tick, so that we don't
- // end up in an overlapped onwrite situation.
- this.bufferProcessing = false;
-
// The callback that's passed to _write(chunk, cb).
this.onwrite = onwrite.bind(undefined, stream);
@@ -172,45 +248,18 @@ function WritableState(options, stream, isDuplex) {
// this must be 0 before 'finish' can be emitted.
this.pendingcb = 0;
- // Stream is still being constructed and cannot be
- // destroyed until construction finished or failed.
- // Async construction is opt in, therefore we start as
- // constructed.
- this.constructed = true;
-
- // Emit prefinish if the only thing we're waiting for is _write cbs
- // This is relevant for synchronous Transform streams.
- this.prefinished = false;
-
- // True if the error was already emitted and should not be thrown again.
- this.errorEmitted = false;
-
- // Should close be emitted on destroy. Defaults to true.
- this.emitClose = !options || options.emitClose !== false;
-
- // Should .destroy() be called after 'finish' (and potentially 'end').
- this.autoDestroy = !options || options.autoDestroy !== false;
-
// Indicates whether the stream has errored. When true all write() calls
// should return false. This is needed since when autoDestroy
// is disabled we need a way to tell whether the stream has failed.
this.errored = null;
- // Indicates whether the stream has finished destroying.
- this.closed = false;
-
- // True if close has been emitted or would have been emitted
- // depending on emitClose.
- this.closeEmitted = false;
-
this[kOnFinished] = [];
}
function resetBuffer(state) {
state.buffered = [];
state.bufferedIndex = 0;
- state.allBuffers = true;
- state.allNoop = true;
+ state.state |= kAllBuffers | kAllNoop;
}
WritableState.prototype.getBuffer = function getBuffer() {
@@ -307,9 +356,9 @@ function _write(stream, chunk, encoding, cb) {
if (chunk === null) {
throw new ERR_STREAM_NULL_VALUES();
- } else if (!state.objectMode) {
+ } else if ((state.state & kObjectMode) === 0) {
if (typeof chunk === 'string') {
- if (state.decodeStrings !== false) {
+ if ((state.state & kDecodeStrings) !== 0) {
chunk = Buffer.from(chunk, encoding);
encoding = 'buffer';
}
@@ -325,9 +374,9 @@ function _write(stream, chunk, encoding, cb) {
}
let err;
- if (state.ending) {
+ if ((state.state & kEnding) !== 0) {
err = new ERR_STREAM_WRITE_AFTER_END();
- } else if (state.destroyed) {
+ } else if ((state.state & kDestroyed) !== 0) {
err = new ERR_STREAM_DESTROYED('write');
}
@@ -354,7 +403,7 @@ Writable.prototype.uncork = function() {
if (state.corked) {
state.corked--;
- if (!state.writing)
+ if ((state.state & kWriting) === 0)
clearBuffer(this, state);
}
};
@@ -373,7 +422,7 @@ Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) {
// in the queue, and wait our turn. Otherwise, call _write
// If we return false, then we need a drain event, so set that flag.
function writeOrBuffer(stream, state, chunk, encoding, callback) {
- const len = state.objectMode ? 1 : chunk.length;
+ const len = (state.state & kObjectMode) !== 0 ? 1 : chunk.length;
state.length += len;
@@ -381,42 +430,40 @@ function writeOrBuffer(stream, state, chunk, encoding, callback) {
const ret = state.length < state.highWaterMark;
// We must ensure that previous needDrain will not be reset to false.
if (!ret)
- state.needDrain = true;
+ state.state |= kNeedDrain;
- if (state.writing || state.corked || state.errored || !state.constructed) {
+ if ((state.state & kWriting) !== 0 || state.corked || state.errored || (state.state & kConstructed) === 0) {
state.buffered.push({ chunk, encoding, callback });
- if (state.allBuffers && encoding !== 'buffer') {
- state.allBuffers = false;
+ if ((state.state & kAllBuffers) !== 0 && encoding !== 'buffer') {
+ state.state &= ~kAllBuffers;
}
- if (state.allNoop && callback !== nop) {
- state.allNoop = false;
+ if ((state.state & kAllNoop) !== 0 && callback !== nop) {
+ state.state &= ~kAllNoop;
}
} else {
state.writelen = len;
state.writecb = callback;
- state.writing = true;
- state.sync = true;
+ state.state |= kWriting | kSync;
stream._write(chunk, encoding, state.onwrite);
- state.sync = false;
+ state.state &= ~kSync;
}
// Return false if errored or destroyed in order to break
// any synchronous while(stream.write(data)) loops.
- return ret && !state.errored && !state.destroyed;
+ return ret && !state.errored && (state.state & kDestroyed) === 0;
}
function doWrite(stream, state, writev, len, chunk, encoding, cb) {
state.writelen = len;
state.writecb = cb;
- state.writing = true;
- state.sync = true;
- if (state.destroyed)
+ state.state |= kWriting | kSync;
+ if ((state.state & kDestroyed) !== 0)
state.onwrite(new ERR_STREAM_DESTROYED('write'));
else if (writev)
stream._writev(chunk, state.onwrite);
else
stream._write(chunk, encoding, state.onwrite);
- state.sync = false;
+ state.state &= ~kSync;
}
function onwriteError(stream, state, er, cb) {
@@ -434,7 +481,7 @@ function onwriteError(stream, state, er, cb) {
function onwrite(stream, er) {
const state = stream._writableState;
- const sync = state.sync;
+ const sync = (state.state & kSync) !== 0;
const cb = state.writecb;
if (typeof cb !== 'function') {
@@ -442,7 +489,7 @@ function onwrite(stream, er) {
return;
}
- state.writing = false;
+ state.state &= ~kWriting;
state.writecb = null;
state.length -= state.writelen;
state.writelen = 0;
@@ -495,10 +542,9 @@ function afterWriteTick({ stream, state, count, cb }) {
}
function afterWrite(stream, state, count, cb) {
- const needDrain = !state.ending && !stream.destroyed && state.length === 0 &&
- state.needDrain;
+ const needDrain = (state.state & (kEnding | kNeedDrain)) === kNeedDrain && !stream.destroyed && state.length === 0;
if (needDrain) {
- state.needDrain = false;
+ state.state &= ~kNeedDrain;
stream.emit('drain');
}
@@ -507,7 +553,7 @@ function afterWrite(stream, state, count, cb) {
cb(null);
}
- if (state.destroyed) {
+ if ((state.state & kDestroyed) !== 0) {
errorBuffer(state);
}
@@ -516,13 +562,13 @@ function afterWrite(stream, state, count, cb) {
// If there's something in the buffer waiting, then invoke callbacks.
function errorBuffer(state) {
- if (state.writing) {
+ if ((state.state & kWriting) !== 0) {
return;
}
for (let n = state.bufferedIndex; n < state.buffered.length; ++n) {
const { chunk, callback } = state.buffered[n];
- const len = state.objectMode ? 1 : chunk.length;
+ const len = (state.state & kObjectMode) !== 0 ? 1 : chunk.length;
state.length -= len;
callback(state.errored ?? new ERR_STREAM_DESTROYED('write'));
}
@@ -538,13 +584,13 @@ function errorBuffer(state) {
// If there's something in the buffer waiting, then process it.
function clearBuffer(stream, state) {
if (state.corked ||
- state.bufferProcessing ||
- state.destroyed ||
- !state.constructed) {
+ (state.state & (kDestroyed | kBufferProcessing)) !== 0 ||
+ (state.state & kConstructed) === 0) {
return;
}
- const { buffered, bufferedIndex, objectMode } = state;
+ const objectMode = (state.state & kObjectMode) !== 0;
+ const { buffered, bufferedIndex } = state;
const bufferedLength = buffered.length - bufferedIndex;
if (!bufferedLength) {
@@ -553,20 +599,20 @@ function clearBuffer(stream, state) {
let i = bufferedIndex;
- state.bufferProcessing = true;
+ state.state |= kBufferProcessing;
if (bufferedLength > 1 && stream._writev) {
state.pendingcb -= bufferedLength - 1;
- const callback = state.allNoop ? nop : (err) => {
+ const callback = (state.state & kAllNoop) !== 0 ? nop : (err) => {
for (let n = i; n < buffered.length; ++n) {
buffered[n].callback(err);
}
};
// Make a copy of `buffered` if it's going to be used by `callback` above,
// since `doWrite` will mutate the array.
- const chunks = state.allNoop && i === 0 ?
+ const chunks = (state.state & kAllNoop) !== 0 && i === 0 ?
buffered : ArrayPrototypeSlice(buffered, i);
- chunks.allBuffers = state.allBuffers;
+ chunks.allBuffers = (state.state & kAllBuffers) !== 0;
doWrite(stream, state, true, state.length, chunks, '', callback);
@@ -577,7 +623,7 @@ function clearBuffer(stream, state) {
buffered[i++] = null;
const len = objectMode ? 1 : chunk.length;
doWrite(stream, state, false, len, chunk, encoding, callback);
- } while (i < buffered.length && !state.writing);
+ } while (i < buffered.length && (state.state & kWriting) === 0);
if (i === buffered.length) {
resetBuffer(state);
@@ -588,7 +634,7 @@ function clearBuffer(stream, state) {
state.bufferedIndex = i;
}
}
- state.bufferProcessing = false;
+ state.state &= ~kBufferProcessing;
}
Writable.prototype._write = function(chunk, encoding, cb) {
@@ -630,26 +676,26 @@ Writable.prototype.end = function(chunk, encoding, cb) {
if (err) {
// Do nothing...
- } else if (!state.errored && !state.ending) {
+ } else if (!state.errored && (state.state & kEnding) === 0) {
// This is forgiving in terms of unnecessary calls to end() and can hide
// logic errors. However, usually such errors are harmless and causing a
// hard error can be disproportionately destructive. It is not always
// trivial for the user to determine whether end() needs to be called
// or not.
- state.ending = true;
+ state.state |= kEnding;
finishMaybe(this, state, true);
- state.ended = true;
- } else if (state.finished) {
+ state.state |= kEnded;
+ } else if ((state.state & kFinished) !== 0) {
err = new ERR_STREAM_ALREADY_FINISHED('end');
- } else if (state.destroyed) {
+ } else if ((state.state & kDestroyed) !== 0) {
err = new ERR_STREAM_DESTROYED('end');
}
if (typeof cb === 'function') {
if (err) {
process.nextTick(cb, err);
- } else if (state.finished) {
+ } else if ((state.state & kFinished) !== 0) {
process.nextTick(cb, null);
} else {
state[kOnFinished].push(cb);
@@ -660,16 +706,20 @@ Writable.prototype.end = function(chunk, encoding, cb) {
};
function needFinish(state) {
- return (state.ending &&
- !state.destroyed &&
- state.constructed &&
+ return (
+ // State is ended && constructed but not destroyed, finished, writing, errorEmitted or closedEmitted
+ (state.state & (
+ kEnding |
+ kDestroyed |
+ kConstructed |
+ kFinished |
+ kWriting |
+ kErrorEmitted |
+ kCloseEmitted
+ )) === (kEnding | kConstructed) &&
state.length === 0 &&
!state.errored &&
- state.buffered.length === 0 &&
- !state.finished &&
- !state.writing &&
- !state.errorEmitted &&
- !state.closeEmitted);
+ state.buffered.length === 0);
}
function callFinal(stream, state) {
@@ -688,9 +738,9 @@ function callFinal(stream, state) {
for (let i = 0; i < onfinishCallbacks.length; i++) {
onfinishCallbacks[i](err);
}
- errorOrDestroy(stream, err, state.sync);
+ errorOrDestroy(stream, err, (state.state & kSync) !== 0);
} else if (needFinish(state)) {
- state.prefinished = true;
+ state.state |= kPrefinished;
stream.emit('prefinish');
// Backwards compat. Don't check state.sync here.
// Some streams assume 'finish' will be emitted
@@ -700,7 +750,7 @@ function callFinal(stream, state) {
}
}
- state.sync = true;
+ state.state |= kSync;
state.pendingcb++;
try {
@@ -709,16 +759,16 @@ function callFinal(stream, state) {
onFinish(err);
}
- state.sync = false;
+ state.state &= ~kSync;
}
function prefinish(stream, state) {
- if (!state.prefinished && !state.finalCalled) {
- if (typeof stream._final === 'function' && !state.destroyed) {
- state.finalCalled = true;
+ if ((state.state & (kPrefinished | kFinalCalled)) === 0) {
+ if (typeof stream._final === 'function' && (state.state & kDestroyed) === 0) {
+ state.state |= kFinalCalled;
callFinal(stream, state);
} else {
- state.prefinished = true;
+ state.state |= kPrefinished;
stream.emit('prefinish');
}
}
@@ -747,7 +797,7 @@ function finishMaybe(stream, state, sync) {
function finish(stream, state) {
state.pendingcb--;
- state.finished = true;
+ state.state |= kFinished;
const onfinishCallbacks = state[kOnFinished].splice(0);
for (let i = 0; i < onfinishCallbacks.length; i++) {
@@ -756,7 +806,7 @@ function finish(stream, state) {
stream.emit('finish');
- if (state.autoDestroy) {
+ if ((state.state & kAutoDestroy) !== 0) {
// In case of duplex streams we need a way to detect
// if the readable side is ready for autoDestroy as well.
const rState = stream._readableState;
@@ -777,20 +827,21 @@ ObjectDefineProperties(Writable.prototype, {
closed: {
__proto__: null,
get() {
- return this._writableState ? this._writableState.closed : false;
+ return this._writableState ? (this._writableState.state & kClosed) !== 0 : false;
},
},
destroyed: {
__proto__: null,
get() {
- return this._writableState ? this._writableState.destroyed : false;
+ return this._writableState ? (this._writableState.state & kDestroyed) !== 0 : false;
},
set(value) {
// Backward compatibility, the user is explicitly managing destroyed.
- if (this._writableState) {
- this._writableState.destroyed = value;
- }
+ if (!this._writableState) return;
+
+ if (value) this._writableState.state |= kDestroyed;
+ else this._writableState.state &= ~kDestroyed;
},
},
@@ -802,8 +853,8 @@ ObjectDefineProperties(Writable.prototype, {
// where the writable side was disabled upon construction.
// Compat. The user might manually disable writable side through
// deprecated setter.
- return !!w && w.writable !== false && !w.destroyed && !w.errored &&
- !w.ending && !w.ended;
+ return !!w && w.writable !== false && !w.errored &&
+ (w.state & (kEnding | kEnded | kDestroyed)) === 0;
},
set(val) {
// Backwards compatible.
@@ -816,14 +867,14 @@ ObjectDefineProperties(Writable.prototype, {
writableFinished: {
__proto__: null,
get() {
- return this._writableState ? this._writableState.finished : false;
+ return this._writableState ? (this._writableState.state & kFinished) !== 0 : false;
},
},
writableObjectMode: {
__proto__: null,
get() {
- return this._writableState ? this._writableState.objectMode : false;
+ return this._writableState ? (this._writableState.state & kObjectMode) !== 0 : false;
},
},
@@ -837,7 +888,7 @@ ObjectDefineProperties(Writable.prototype, {
writableEnded: {
__proto__: null,
get() {
- return this._writableState ? this._writableState.ending : false;
+ return this._writableState ? (this._writableState.state & kEnding) !== 0 : false;
},
},
@@ -846,7 +897,9 @@ ObjectDefineProperties(Writable.prototype, {
get() {
const wState = this._writableState;
if (!wState) return false;
- return !wState.destroyed && !wState.ending && wState.needDrain;
+
+ // !destroyed && !ending && needDrain
+ return (wState.state & (kDestroyed | kEnding | kNeedDrain)) === kNeedDrain;
},
},
@@ -885,8 +938,8 @@ ObjectDefineProperties(Writable.prototype, {
get: function() {
return !!(
this._writableState.writable !== false &&
- (this._writableState.destroyed || this._writableState.errored) &&
- !this._writableState.finished
+ ((this._writableState.state & kDestroyed) !== 0 || this._writableState.errored) &&
+ (this._writableState.state & kFinished) === 0
);
},
},
@@ -897,7 +950,7 @@ Writable.prototype.destroy = function(err, cb) {
const state = this._writableState;
// Invoke pending callbacks.
- if (!state.destroyed &&
+ if ((state.state & kDestroyed) === 0 &&
(state.bufferedIndex < state.buffered.length ||
state[kOnFinished].length)) {
process.nextTick(errorBuffer, state);
diff --git a/lib/internal/test_runner/coverage.js b/lib/internal/test_runner/coverage.js
index 1ed45028f61575..7727ab006052ba 100644
--- a/lib/internal/test_runner/coverage.js
+++ b/lib/internal/test_runner/coverage.js
@@ -25,7 +25,7 @@ const {
const { setupCoverageHooks } = require('internal/util');
const { tmpdir } = require('os');
const { join, resolve } = require('path');
-const { fileURLToPath } = require('url');
+const { fileURLToPath } = require('internal/url');
const kCoverageFileRegex = /^coverage-(\d+)-(\d{13})-(\d+)\.json$/;
const kIgnoreRegex = /\/\* node:coverage ignore next (?<count>\d+ )?\*\//;
const kLineEndingRegex = /\r?\n$/u;
@@ -338,7 +338,7 @@ function mapRangeToLines(range, lines) {
mid = MathFloor((start + end) / 2);
let line = lines[mid];
- if (startOffset >= line.startOffset && startOffset <= line.endOffset) {
+ if (startOffset >= line?.startOffset && startOffset <= line?.endOffset) {
while (endOffset > line?.startOffset) {
// If the range is not covered, and the range covers the entire line,
// then mark that line as not covered.
@@ -363,7 +363,7 @@ function mapRangeToLines(range, lines) {
}
break;
- } else if (startOffset >= line.endOffset) {
+ } else if (startOffset >= line?.endOffset) {
start = mid + 1;
} else {
end = mid - 1;
@@ -538,4 +538,4 @@ function doesRangeContainOtherRange(range, otherRange) {
range.endOffset >= otherRange.endOffset;
}
-module.exports = { setupCoverage };
+module.exports = { setupCoverage, TestCoverage };
diff --git a/lib/internal/test_runner/mock/mock.js b/lib/internal/test_runner/mock/mock.js
index a704b41996e6d2..838a530ddd4902 100644
--- a/lib/internal/test_runner/mock/mock.js
+++ b/lib/internal/test_runner/mock/mock.js
@@ -45,19 +45,36 @@ class MockFunctionContext {
this.#times = times;
}
+ /**
+ * Gets an array of recorded calls made to the mock function.
+ * @returns {Array} An array of recorded calls.
+ */
get calls() {
return ArrayPrototypeSlice(this.#calls, 0);
}
+ /**
+ * Retrieves the number of times the mock function has been called.
+ * @returns {number} The call count.
+ */
callCount() {
return this.#calls.length;
}
+ /**
+ * Sets a new implementation for the mock function.
+ * @param {Function} implementation - The new implementation for the mock function.
+ */
mockImplementation(implementation) {
validateFunction(implementation, 'implementation');
this.#implementation = implementation;
}
+ /**
+ * Replaces the implementation of the function only once.
+ * @param {Function} implementation - The substitute function.
+ * @param {number} [onCall] - The call index to be replaced.
+ */
mockImplementationOnce(implementation, onCall) {
validateFunction(implementation, 'implementation');
const nextCall = this.#calls.length;
@@ -66,6 +83,9 @@ class MockFunctionContext {
this.#mocks.set(call, implementation);
}
+ /**
+ * Restores the original function that was mocked.
+ */
restore() {
const { descriptor, object, original, methodName } = this.#restore;
@@ -79,14 +99,25 @@ class MockFunctionContext {
}
}
+ /**
+ * Resets the recorded calls to the mock function
+ */
resetCalls() {
this.#calls = [];
}
+ /**
+ * Tracks a call made to the mock function.
+ * @param {object} call - The call details.
+ */
trackCall(call) {
ArrayPrototypePush(this.#calls, call);
}
+ /**
+ * Gets the next implementation to use for the mock function.
+ * @returns {Function} The next implementation.
+ */
nextImpl() {
const nextCall = this.#calls.length;
const mock = this.#mocks.get(nextCall);
@@ -109,11 +140,23 @@ class MockTracker {
#mocks = [];
#timers;
+ /**
+ * Returns the mock timers of this MockTracker instance.
+ * @returns {MockTimers} The mock timers instance.
+ */
get timers() {
this.#timers ??= new MockTimers();
return this.#timers;
}
+ /**
+ * Creates a mock function tracker.
+ * @param {Function} [original] - The original function to be tracked.
+ * @param {Function} [implementation] - An optional replacement function for the original one.
+ * @param {object} [options] - Additional tracking options.
+ * @param {number} [options.times=Infinity] - The maximum number of times the mock function can be called.
+ * @returns {ProxyConstructor} The mock function tracker.
+ */
fn(
original = function() {},
implementation = original,
@@ -137,6 +180,17 @@ class MockTracker {
return this.#setupMock(ctx, original);
}
+ /**
+ * Creates a method tracker for a specified object or function.
+ * @param {(object | Function)} objectOrFunction - The object or function containing the method to be tracked.
+ * @param {string} methodName - The name of the method to be tracked.
+ * @param {Function} [implementation] - An optional replacement function for the original method.
+ * @param {object} [options] - Additional tracking options.
+ * @param {boolean} [options.getter=false] - Indicates whether this is a getter method.
+ * @param {boolean} [options.setter=false] - Indicates whether this is a setter method.
+ * @param {number} [options.times=Infinity] - The maximum number of times the mock method can be called.
+ * @returns {ProxyConstructor} The mock method tracker.
+ */
method(
objectOrFunction,
methodName,
@@ -216,6 +270,18 @@ class MockTracker {
return mock;
}
+ /**
+ * Mocks a getter method of an object.
+ * This is a syntax sugar for the MockTracker.method with options.getter set to true
+ * @param {object} object - The target object.
+ * @param {string} methodName - The name of the getter method to be mocked.
+ * @param {Function} [implementation] - An optional replacement function for the targeted method.
+ * @param {object} [options] - Additional tracking options.
+ * @param {boolean} [options.getter=true] - Indicates whether this is a getter method.
+ * @param {boolean} [options.setter=false] - Indicates whether this is a setter method.
+ * @param {number} [options.times=Infinity] - The maximum number of times the mock method can be called.
+ * @returns {ProxyConstructor} The mock method tracker.
+ */
getter(
object,
methodName,
@@ -244,6 +310,18 @@ class MockTracker {
});
}
+ /**
+ * Mocks a setter method of an object.
+ * This function is a syntax sugar for MockTracker.method with options.setter set to true.
+ * @param {object} object - The target object.
+ * @param {string} methodName - The setter method to be mocked.
+ * @param {Function} [implementation] - An optional replacement function for the targeted method.
+ * @param {object} [options] - Additional tracking options.
+ * @param {boolean} [options.getter=false] - Indicates whether this is a getter method.
+ * @param {boolean} [options.setter=true] - Indicates whether this is a setter method.
+ * @param {number} [options.times=Infinity] - The maximum number of times the mock method can be called.
+ * @returns {ProxyConstructor} The mock method tracker.
+ */
setter(
object,
methodName,
@@ -272,12 +350,18 @@ class MockTracker {
});
}
+ /**
+ * Resets the mock tracker, restoring all mocks and clearing timers.
+ */
reset() {
this.restoreAll();
this.#timers?.reset();
this.#mocks = [];
}
+ /**
+ * Restore all mocks created by this MockTracker instance.
+ */
restoreAll() {
for (let i = 0; i < this.#mocks.length; i++) {
FunctionPrototypeCall(restore, this.#mocks[i]);
diff --git a/lib/internal/test_runner/mock/mock_timers.js b/lib/internal/test_runner/mock/mock_timers.js
index 7e38f9f7b5113c..1ff6489885fe30 100644
--- a/lib/internal/test_runner/mock/mock_timers.js
+++ b/lib/internal/test_runner/mock/mock_timers.js
@@ -372,7 +372,7 @@ class MockTimers {
ObjectDefineProperty(
nodeTimers,
'setTimeout',
- this.#realSetTimeout,
+ this.#realTimersSetTimeout,
);
ObjectDefineProperty(
nodeTimers,
@@ -455,6 +455,13 @@ class MockTimers {
);
}
+ /**
+ * Advances the virtual time of MockTimers by the specified duration (in milliseconds).
+ * This method simulates the passage of time and triggers any scheduled timers that are due.
+ * @param {number} [time=1] - The amount of time (in milliseconds) to advance the virtual time.
+ * @throws {ERR_INVALID_STATE} If MockTimers are not enabled.
+ * @throws {ERR_INVALID_ARG_VALUE} If a negative time value is provided.
+ */
tick(time = 1) {
if (!this.#isEnabled) {
throw new ERR_INVALID_STATE(
@@ -488,6 +495,12 @@ class MockTimers {
}
}
+ /**
+ * Enables MockTimers for the specified timers.
+ * @param {string[]} timers - An array of timer types to enable, e.g., ['setTimeout', 'setInterval'].
+ * @throws {ERR_INVALID_STATE} If MockTimers are already enabled.
+ * @throws {ERR_INVALID_ARG_VALUE} If an unsupported timer type is specified.
+ */
enable(timers = SUPPORTED_TIMERS) {
if (this.#isEnabled) {
throw new ERR_INVALID_STATE(
@@ -513,10 +526,17 @@ class MockTimers {
this.#toggleEnableTimers(true);
}
+ /**
+ * An alias for `this.reset()`, allowing the disposal of the `MockTimers` instance.
+ */
[SymbolDispose]() {
this.reset();
}
+ /**
+ * Resets MockTimers, disabling any enabled timers and clearing the execution queue.
+ * Does nothing if MockTimers are not enabled.
+ */
reset() {
// Ignore if not enabled
if (!this.#isEnabled) return;
@@ -531,6 +551,10 @@ class MockTimers {
}
}
+ /**
+ * Runs all scheduled timers until there are no more pending timers.
+ * @throws {ERR_INVALID_STATE} If MockTimers are not enabled.
+ */
runAll() {
if (!this.#isEnabled) {
throw new ERR_INVALID_STATE(
diff --git a/lib/internal/test_runner/reporter/junit.js b/lib/internal/test_runner/reporter/junit.js
new file mode 100644
index 00000000000000..b45c233861c000
--- /dev/null
+++ b/lib/internal/test_runner/reporter/junit.js
@@ -0,0 +1,158 @@
+'use strict';
+const {
+ ArrayPrototypeFilter,
+ ArrayPrototypeMap,
+ ArrayPrototypeJoin,
+ ArrayPrototypePush,
+ ArrayPrototypeSome,
+ NumberPrototypeToFixed,
+ ObjectEntries,
+ RegExpPrototypeSymbolReplace,
+ String,
+ StringPrototypeRepeat,
+} = primordials;
+
+const { inspectWithNoCustomRetry } = require('internal/errors');
+const { hostname } = require('os');
+
+const inspectOptions = { __proto__: null, colors: false, breakLength: Infinity };
+const HOSTNAME = hostname();
+
+function escapeAttribute(s = '') {
+ return escapeContent(RegExpPrototypeSymbolReplace(/"/g, RegExpPrototypeSymbolReplace(/\n/g, s, ''), '&quot;'));
+}
+
+function escapeContent(s = '') {
+  return RegExpPrototypeSymbolReplace(/</g, RegExpPrototypeSymbolReplace(/&/g, s, '&amp;'), '&lt;');
+}
+
+function escapeComment(s = '') {
+  return RegExpPrototypeSymbolReplace(/--/g, s, '&#45;&#45;');
+}
+
+function treeToXML(tree) {
+  if (typeof tree === 'string') {
+    return `${tree}\n`;
+  }
+  const { tag, attrs, nesting, children, comment } = tree;
+  const indent = StringPrototypeRepeat('\t', nesting + 1);
+  if (comment) {
+    return `${indent}<!-- ${escapeComment(comment)} -->\n`;
+  }
+ const attrsString = ArrayPrototypeJoin(ArrayPrototypeMap(ObjectEntries(attrs)
+ , ({ 0: key, 1: value }) => `${key}="${escapeAttribute(String(value))}"`)
+ , ' ');
+ if (!children?.length) {
+ return `${indent}<${tag} ${attrsString}/>\n`;
+ }
+ const childrenString = ArrayPrototypeJoin(ArrayPrototypeMap(children ?? [], treeToXML), '');
+ return `${indent}<${tag} ${attrsString}>\n${childrenString}${indent}</${tag}>\n`;
+}
+
+function isFailure(node) {
+ return (node?.children && ArrayPrototypeSome(node.children, (c) => c.tag === 'failure')) || node?.attrs?.failures;
+}
+
+function isSkipped(node) {
+ return (node?.children && ArrayPrototypeSome(node.children, (c) => c.tag === 'skipped')) || node?.attrs?.failures;
+}
+
+module.exports = async function* junitReporter(source) {
+ yield '<?xml version="1.0" encoding="utf-8"?>\n';
+ yield '<testsuites>\n';
+ let currentSuite = null;
+ const roots = [];
+
+ function startTest(event) {
+ const originalSuite = currentSuite;
+ currentSuite = {
+ __proto__: null,
+ attrs: { __proto__: null, name: event.data.name },
+ nesting: event.data.nesting,
+ parent: currentSuite,
+ children: [],
+ };
+ if (originalSuite?.children) {
+ ArrayPrototypePush(originalSuite.children, currentSuite);
+ }
+ if (!currentSuite.parent) {
+ ArrayPrototypePush(roots, currentSuite);
+ }
+ }
+
+ for await (const event of source) {
+ switch (event.type) {
+ case 'test:start': {
+ startTest(event);
+ break;
+ }
+ case 'test:pass':
+ case 'test:fail': {
+ if (!currentSuite) {
+ startTest({ __proto__: null, data: { __proto__: null, name: 'root', nesting: 0 } });
+ }
+ if (currentSuite.attrs.name !== event.data.name ||
+ currentSuite.nesting !== event.data.nesting) {
+ startTest(event);
+ }
+ const currentTest = currentSuite;
+ if (currentSuite?.nesting === event.data.nesting) {
+ currentSuite = currentSuite.parent;
+ }
+ currentTest.attrs.time = NumberPrototypeToFixed(event.data.details.duration_ms / 1000, 6);
+ const nonCommentChildren = ArrayPrototypeFilter(currentTest.children, (c) => c.comment == null);
+ if (nonCommentChildren.length > 0) {
+ currentTest.tag = 'testsuite';
+ currentTest.attrs.disabled = 0;
+ currentTest.attrs.errors = 0;
+ currentTest.attrs.tests = nonCommentChildren.length;
+ currentTest.attrs.failures = ArrayPrototypeFilter(currentTest.children, isFailure).length;
+ currentTest.attrs.skipped = ArrayPrototypeFilter(currentTest.children, isSkipped).length;
+ currentTest.attrs.hostname = HOSTNAME;
+ } else {
+ currentTest.tag = 'testcase';
+ currentTest.attrs.classname = event.data.classname ?? 'test';
+ if (event.data.skip) {
+ ArrayPrototypePush(currentTest.children, {
+ __proto__: null, nesting: event.data.nesting + 1, tag: 'skipped',
+ attrs: { __proto__: null, type: 'skipped', message: event.data.skip },
+ });
+ }
+ if (event.data.todo) {
+ ArrayPrototypePush(currentTest.children, {
+ __proto__: null, nesting: event.data.nesting + 1, tag: 'skipped',
+ attrs: { __proto__: null, type: 'todo', message: event.data.todo },
+ });
+ }
+ if (event.type === 'test:fail') {
+ const error = event.data.details?.error;
+ currentTest.children.push({
+ __proto__: null,
+ nesting: event.data.nesting + 1,
+ tag: 'failure',
+ attrs: { __proto__: null, type: error?.failureType || error?.code, message: error?.message ?? '' },
+ children: [inspectWithNoCustomRetry(error, inspectOptions)],
+ });
+ currentTest.failures = 1;
+ currentTest.attrs.failure = error?.message ?? '';
+ }
+ }
+ break;
+ }
+ case 'test:diagnostic': {
+ const parent = currentSuite?.children ?? roots;
+ ArrayPrototypePush(parent, {
+ __proto__: null, nesting: event.data.nesting, comment: event.data.message,
+ });
+ break;
+ } default:
+ break;
+ }
+ }
+ for (const suite of roots) {
+ yield treeToXML(suite);
+ }
+ yield '</testsuites>\n';
+};
diff --git a/lib/internal/test_runner/runner.js b/lib/internal/test_runner/runner.js
index fdaa981eece4e3..4832d6b8179a62 100644
--- a/lib/internal/test_runner/runner.js
+++ b/lib/internal/test_runner/runner.js
@@ -154,14 +154,17 @@ function filterExecArgv(arg, i, arr) {
!ArrayPrototypeSome(kFilterArgValues, (p) => arg === p || (i > 0 && arr[i - 1] === p) || StringPrototypeStartsWith(arg, `${p}=`));
}
-function getRunArgs({ path, inspectPort, testNamePatterns }) {
+function getRunArgs(path, { inspectPort, testNamePatterns, only }) {
const argv = ArrayPrototypeFilter(process.execArgv, filterExecArgv);
if (isUsingInspector()) {
ArrayPrototypePush(argv, `--inspect-port=${getInspectPort(inspectPort)}`);
}
- if (testNamePatterns) {
+ if (testNamePatterns != null) {
ArrayPrototypeForEach(testNamePatterns, (pattern) => ArrayPrototypePush(argv, `--test-name-pattern=${pattern}`));
}
+ if (only === true) {
+ ArrayPrototypePush(argv, '--test-only');
+ }
ArrayPrototypePush(argv, path);
return argv;
@@ -345,17 +348,17 @@ class FileTest extends Test {
}
}
-function runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns) {
+function runTestFile(path, filesWatcher, opts) {
const watchMode = filesWatcher != null;
- const subtest = root.createSubtest(FileTest, path, async (t) => {
- const args = getRunArgs({ __proto__: null, path, inspectPort, testNamePatterns });
+ const subtest = opts.root.createSubtest(FileTest, path, async (t) => {
+ const args = getRunArgs(path, opts);
const stdio = ['pipe', 'pipe', 'pipe'];
const env = { __proto__: null, ...process.env, NODE_TEST_CONTEXT: 'child-v8' };
if (watchMode) {
stdio.push('ipc');
env.WATCH_REPORT_DEPENDENCIES = '1';
}
- if (root.harness.shouldColorizeTestFiles) {
+ if (opts.root.harness.shouldColorizeTestFiles) {
env.FORCE_COLOR = '1';
}
@@ -402,7 +405,7 @@ function runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns) {
filesWatcher.runningProcesses.delete(path);
filesWatcher.runningSubtests.delete(path);
if (filesWatcher.runningSubtests.size === 0) {
- root.reporter[kEmitMessage]('test:watch:drained');
+ opts.root.reporter[kEmitMessage]('test:watch:drained');
}
}
@@ -425,10 +428,10 @@ function runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns) {
return subtest.start();
}
-function watchFiles(testFiles, root, inspectPort, signal, testNamePatterns) {
+function watchFiles(testFiles, opts) {
const runningProcesses = new SafeMap();
const runningSubtests = new SafeMap();
- const watcher = new FilesWatcher({ __proto__: null, debounce: 200, mode: 'filter', signal });
+ const watcher = new FilesWatcher({ __proto__: null, debounce: 200, mode: 'filter', signal: opts.signal });
const filesWatcher = { __proto__: null, watcher, runningProcesses, runningSubtests };
watcher.on('changed', ({ owners }) => {
@@ -444,19 +447,19 @@ function watchFiles(testFiles, root, inspectPort, signal, testNamePatterns) {
}
if (!runningSubtests.size) {
// Reset the topLevel counter
- root.harness.counters.topLevel = 0;
+ opts.root.harness.counters.topLevel = 0;
}
await runningSubtests.get(file);
- runningSubtests.set(file, runTestFile(file, root, inspectPort, filesWatcher, testNamePatterns));
+ runningSubtests.set(file, runTestFile(file, filesWatcher, opts));
}, undefined, (error) => {
triggerUncaughtException(error, true /* fromPromise */);
}));
});
- if (signal) {
+ if (opts.signal) {
kResistStopPropagation ??= require('internal/event_target').kResistStopPropagation;
- signal.addEventListener(
+ opts.signal.addEventListener(
'abort',
- () => root.postRun(),
+ () => opts.root.postRun(),
{ __proto__: null, once: true, [kResistStopPropagation]: true },
);
}
@@ -469,7 +472,7 @@ function run(options) {
options = kEmptyObject;
}
let { testNamePatterns, shard } = options;
- const { concurrency, timeout, signal, files, inspectPort, watch, setup } = options;
+ const { concurrency, timeout, signal, files, inspectPort, watch, setup, only } = options;
if (files != null) {
validateArray(files, 'options.files');
@@ -477,6 +480,9 @@ function run(options) {
if (watch != null) {
validateBoolean(watch, 'options.watch');
}
+ if (only != null) {
+ validateBoolean(only, 'options.only');
+ }
if (shard != null) {
validateObject(shard, 'options.shard');
// Avoid re-evaluating the shard object in case it's a getter
@@ -522,14 +528,15 @@ function run(options) {
let postRun = () => root.postRun();
let filesWatcher;
+ const opts = { __proto__: null, root, signal, inspectPort, testNamePatterns, only };
if (watch) {
- filesWatcher = watchFiles(testFiles, root, inspectPort, signal, testNamePatterns);
+ filesWatcher = watchFiles(testFiles, opts);
postRun = undefined;
}
const runFiles = () => {
root.harness.bootstrapComplete = true;
return SafePromiseAllSettledReturnVoid(testFiles, (path) => {
- const subtest = runTestFile(path, root, inspectPort, filesWatcher, testNamePatterns);
+ const subtest = runTestFile(path, filesWatcher, opts);
filesWatcher?.runningSubtests.set(path, subtest);
return subtest;
});
diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js
index 975ad4ac08b41f..4afb93f4a60df0 100644
--- a/lib/internal/test_runner/test.js
+++ b/lib/internal/test_runner/test.js
@@ -737,6 +737,8 @@ class Test extends AsyncResource {
this.reported = true;
reporter.plan(nesting, loc, harness.counters.topLevel);
+ // Call this harness.coverage() before collecting diagnostics, since failure to collect coverage is a diagnostic.
+ const coverage = harness.coverage();
for (let i = 0; i < diagnostics.length; i++) {
reporter.diagnostic(nesting, loc, diagnostics[i]);
}
@@ -750,8 +752,6 @@ class Test extends AsyncResource {
reporter.diagnostic(nesting, loc, `todo ${harness.counters.todo}`);
reporter.diagnostic(nesting, loc, `duration_ms ${this.duration()}`);
- const coverage = harness.coverage();
-
if (coverage) {
reporter.coverage(nesting, loc, coverage);
}
diff --git a/lib/internal/test_runner/utils.js b/lib/internal/test_runner/utils.js
index d2cabbac9a2c66..ba1b4f0fa10869 100644
--- a/lib/internal/test_runner/utils.js
+++ b/lib/internal/test_runner/utils.js
@@ -116,6 +116,7 @@ const kBuiltinReporters = new SafeMap([
['spec', 'internal/test_runner/reporter/spec'],
['dot', 'internal/test_runner/reporter/dot'],
['tap', 'internal/test_runner/reporter/tap'],
+ ['junit', 'internal/test_runner/reporter/junit'],
]);
const kDefaultReporter = process.stdout.isTTY ? 'spec' : 'tap';
diff --git a/lib/internal/url.js b/lib/internal/url.js
index 37f67e6792959c..8d5926e8fcb9df 100644
--- a/lib/internal/url.js
+++ b/lib/internal/url.js
@@ -772,13 +772,7 @@ class URL {
base = `${base}`;
}
- const href = bindingUrl.parse(input, base);
-
- if (!href) {
- throw new ERR_INVALID_URL(input);
- }
-
- this.#updateContext(href);
+ this.#updateContext(bindingUrl.parse(input, base));
}
[inspect.custom](depth, opts) {
diff --git a/lib/internal/util.js b/lib/internal/util.js
index 3586084ba7b8bd..d35867635ca07b 100644
--- a/lib/internal/util.js
+++ b/lib/internal/util.js
@@ -358,6 +358,36 @@ function getConstructorOf(obj) {
return null;
}
+let cachedURL;
+let cachedCWD;
+
+/**
+ * Get the current working directory while accounting for the possibility that it has been deleted.
+ * `process.cwd()` can fail if the parent directory is deleted while the process runs.
+ * @returns {URL} The current working directory or the volume root if it cannot be determined.
+ */
+function getCWDURL() {
+ const { sep } = require('path');
+ const { pathToFileURL } = require('internal/url');
+
+ let cwd;
+
+ try {
+ // The implementation of `process.cwd()` already uses proper cache when it can.
+ // It's a relatively cheap call performance-wise for the most common use case.
+ cwd = process.cwd();
+ } catch {
+ cachedURL ??= pathToFileURL(sep);
+ }
+
+ if (cwd != null && cwd !== cachedCWD) {
+ cachedURL = pathToFileURL(cwd + sep);
+ cachedCWD = cwd;
+ }
+
+ return cachedURL;
+}
+
function getSystemErrorName(err) {
const entry = uvErrmapGet(err);
return entry ? entry[0] : `Unknown system error ${err}`;
@@ -850,6 +880,7 @@ module.exports = {
filterDuplicateStrings,
filterOwnProperties,
getConstructorOf,
+ getCWDURL,
getInternalGlobal,
getSystemErrorMap,
getSystemErrorName,
diff --git a/lib/internal/watch_mode/files_watcher.js b/lib/internal/watch_mode/files_watcher.js
index b38f94d7cc8051..895c6ec138d131 100644
--- a/lib/internal/watch_mode/files_watcher.js
+++ b/lib/internal/watch_mode/files_watcher.js
@@ -14,7 +14,7 @@ const { TIMEOUT_MAX } = require('internal/timers');
const EventEmitter = require('events');
const { watch } = require('fs');
-const { fileURLToPath } = require('url');
+const { fileURLToPath } = require('internal/url');
const { resolve, dirname } = require('path');
const { setTimeout } = require('timers');
diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js
index 3d70f97a84bb47..5603d2bfd8500f 100644
--- a/lib/internal/webstreams/readablestream.js
+++ b/lib/internal/webstreams/readablestream.js
@@ -14,7 +14,6 @@ const {
ObjectCreate,
ObjectDefineProperties,
ObjectSetPrototypeOf,
- Promise,
PromisePrototypeThen,
PromiseResolve,
PromiseReject,
@@ -479,9 +478,13 @@ class ReadableStream {
// eslint-disable-next-line no-use-before-define
const reader = new ReadableStreamDefaultReader(this);
- let done = false;
+
+ // No __proto__ here to avoid the performance hit.
+ const state = {
+ done: false,
+ current: undefined,
+ };
let started = false;
- let current;
// The nextSteps function is not an async function in order
// to make it more efficient. Because nextSteps explicitly
@@ -490,7 +493,7 @@ class ReadableStream {
// unnecessary Promise allocations to occur, which just add
// cost.
function nextSteps() {
- if (done)
+ if (state.done)
return PromiseResolve({ done: true, value: undefined });
if (reader[kState].stream === undefined) {
@@ -500,31 +503,15 @@ class ReadableStream {
}
const promise = createDeferredPromise();
- readableStreamDefaultReaderRead(reader, {
- [kChunk](chunk) {
- current = undefined;
- promise.resolve({ value: chunk, done: false });
- },
- [kClose]() {
- current = undefined;
- done = true;
- readableStreamReaderGenericRelease(reader);
- promise.resolve({ done: true, value: undefined });
- },
- [kError](error) {
- current = undefined;
- done = true;
- readableStreamReaderGenericRelease(reader);
- promise.reject(error);
- },
- });
+ // eslint-disable-next-line no-use-before-define
+ readableStreamDefaultReaderRead(reader, new ReadableStreamAsyncIteratorReadRequest(reader, state, promise));
return promise.promise;
}
async function returnSteps(value) {
- if (done)
+ if (state.done)
return { done: true, value }; // eslint-disable-line node-core/avoid-prototype-pollution
- done = true;
+ state.done = true;
if (reader[kState].stream === undefined) {
throw new ERR_INVALID_STATE.TypeError(
@@ -561,19 +548,19 @@ class ReadableStream {
// need to investigate if it's a bug in our impl or
// the spec.
if (!started) {
- current = PromiseResolve();
+ state.current = PromiseResolve();
started = true;
}
- current = current !== undefined ?
- PromisePrototypeThen(current, nextSteps, nextSteps) :
+ state.current = state.current !== undefined ?
+ PromisePrototypeThen(state.current, nextSteps, nextSteps) :
nextSteps();
- return current;
+ return state.current;
},
return(error) {
- return current ?
+ return state.current ?
PromisePrototypeThen(
- current,
+ state.current,
() => returnSteps(error),
() => returnSteps(error)) :
returnSteps(error);
@@ -774,6 +761,33 @@ function createReadableStreamBYOBRequest(controller, view) {
return stream;
}
+class ReadableStreamAsyncIteratorReadRequest {
+ constructor(reader, state, promise) {
+ this.reader = reader;
+ this.state = state;
+ this.promise = promise;
+ }
+
+ [kChunk](chunk) {
+ this.state.current = undefined;
+ this.promise.resolve({ value: chunk, done: false });
+ }
+
+ [kClose]() {
+ this.state.current = undefined;
+ this.state.done = true;
+ readableStreamReaderGenericRelease(this.reader);
+ this.promise.resolve({ done: true, value: undefined });
+ }
+
+ [kError](error) {
+ this.state.current = undefined;
+ this.state.done = true;
+ readableStreamReaderGenericRelease(this.reader);
+ this.promise.reject(error);
+ }
+}
+
class DefaultReadRequest {
constructor() {
this[kState] = createDeferredPromise();
@@ -1339,7 +1353,9 @@ function readableStreamPipeTo(
const promise = createDeferredPromise();
- let currentWrite = PromiseResolve();
+ const state = {
+ currentWrite: PromiseResolve(),
+ };
// The error here can be undefined. The rejected arg
// tells us that the promise must be rejected even
@@ -1356,9 +1372,9 @@ function readableStreamPipeTo(
}
async function waitForCurrentWrite() {
- const write = currentWrite;
+ const write = state.currentWrite;
await write;
- if (write !== currentWrite)
+ if (write !== state.currentWrite)
await waitForCurrentWrite();
}
@@ -1449,20 +1465,14 @@ function readableStreamPipeTo(
async function step() {
if (shuttingDown)
return true;
+
await writer[kState].ready.promise;
- return new Promise((resolve, reject) => {
- readableStreamDefaultReaderRead(
- reader,
- {
- [kChunk](chunk) {
- currentWrite = writableStreamDefaultWriterWrite(writer, chunk);
- setPromiseHandled(currentWrite);
- resolve(false);
- },
- [kClose]: () => resolve(true),
- [kError]: reject,
- });
- });
+
+ const promise = createDeferredPromise();
+ // eslint-disable-next-line no-use-before-define
+ readableStreamDefaultReaderRead(reader, new PipeToReadableStreamReadRequest(writer, state, promise));
+
+ return promise.promise;
}
async function run() {
@@ -1524,6 +1534,28 @@ function readableStreamPipeTo(
return promise.promise;
}
+class PipeToReadableStreamReadRequest {
+ constructor(writer, state, promise) {
+ this.writer = writer;
+ this.state = state;
+ this.promise = promise;
+ }
+
+ [kChunk](chunk) {
+ this.state.currentWrite = writableStreamDefaultWriterWrite(this.writer, chunk);
+ setPromiseHandled(this.state.currentWrite);
+ this.promise.resolve(false);
+ }
+
+ [kClose]() {
+ this.promise.resolve(true);
+ }
+
+ [kError](error) {
+ this.promise.reject(error);
+ }
+}
+
function readableStreamTee(stream, cloneForBranch2) {
if (isReadableByteStreamController(stream[kState].controller)) {
return readableByteStreamTee(stream);
diff --git a/lib/repl.js b/lib/repl.js
index 2a63050923b198..931ae5087c8fb3 100644
--- a/lib/repl.js
+++ b/lib/repl.js
@@ -204,6 +204,7 @@ const domainSet = new SafeWeakSet();
const kBufferedCommandSymbol = Symbol('bufferedCommand');
const kContextId = Symbol('contextId');
+const kLoadingSymbol = Symbol('loading');
let addedNewListener = false;
@@ -467,7 +468,7 @@ function REPLServer(prompt,
if (e.name === 'SyntaxError') {
let parentURL;
try {
- const { pathToFileURL } = require('url');
+ const { pathToFileURL } = require('internal/url');
// Adding `/repl` prevents dynamic imports from loading relative
// to the parent of `process.cwd()`.
parentURL = pathToFileURL(path.join(process.cwd(), 'repl')).href;
@@ -508,7 +509,7 @@ function REPLServer(prompt,
if (err === null) {
let parentURL;
try {
- const { pathToFileURL } = require('url');
+ const { pathToFileURL } = require('internal/url');
// Adding `/repl` prevents dynamic imports from loading relative
// to the parent of `process.cwd()`.
parentURL = pathToFileURL(path.join(process.cwd(), 'repl')).href;
@@ -882,7 +883,7 @@ function REPLServer(prompt,
self[kBufferedCommandSymbol] += cmd + '\n';
// code alignment
- const matches = self._sawKeyPress ?
+ const matches = self._sawKeyPress && !self[kLoadingSymbol] ?
RegExpPrototypeExec(/^\s+/, cmd) : null;
if (matches) {
const prefix = matches[0];
@@ -1801,8 +1802,10 @@ function defineDefaultCommands(repl) {
const stats = fs.statSync(file);
if (stats && stats.isFile()) {
_turnOnEditorMode(this);
+ this[kLoadingSymbol] = true;
const data = fs.readFileSync(file, 'utf8');
this.write(data);
+ this[kLoadingSymbol] = false;
_turnOffEditorMode(this);
this.write('\n');
} else {
diff --git a/lib/test/reporters.js b/lib/test/reporters.js
index 86aea679b52a7a..06a0b27ee58275 100644
--- a/lib/test/reporters.js
+++ b/lib/test/reporters.js
@@ -3,6 +3,7 @@
const { ObjectDefineProperties, ReflectConstruct } = primordials;
let dot;
+let junit;
let spec;
let tap;
@@ -17,6 +18,15 @@ ObjectDefineProperties(module.exports, {
return dot;
},
},
+ junit: {
+ __proto__: null,
+ configurable: true,
+ enumerable: true,
+ get() {
+ junit ??= require('internal/test_runner/reporter/junit');
+ return junit;
+ },
+ },
spec: {
__proto__: null,
configurable: true,
diff --git a/node.gyp b/node.gyp
index 22a45eb8fd3ac8..4e614df1a9b9ad 100644
--- a/node.gyp
+++ b/node.gyp
@@ -10,6 +10,7 @@
'node_use_v8_platform%': 'true',
'node_use_bundled_v8%': 'true',
'node_shared%': 'false',
+ 'node_write_snapshot_as_string_literals': 'true',
'force_dynamic_crt%': 0,
'ossfuzz' : 'false',
'node_module_version%': '',
@@ -367,6 +368,38 @@
'src/quic/tokens.h',
'src/quic/transportparams.h',
],
+ 'node_cctest_sources': [
+ 'src/node_snapshot_stub.cc',
+ 'test/cctest/node_test_fixture.cc',
+ 'test/cctest/node_test_fixture.h',
+ 'test/cctest/test_aliased_buffer.cc',
+ 'test/cctest/test_base64.cc',
+ 'test/cctest/test_base_object_ptr.cc',
+ 'test/cctest/test_cppgc.cc',
+ 'test/cctest/test_node_postmortem_metadata.cc',
+ 'test/cctest/test_environment.cc',
+ 'test/cctest/test_linked_binding.cc',
+ 'test/cctest/test_node_api.cc',
+ 'test/cctest/test_per_process.cc',
+ 'test/cctest/test_platform.cc',
+ 'test/cctest/test_report.cc',
+ 'test/cctest/test_json_utils.cc',
+ 'test/cctest/test_sockaddr.cc',
+ 'test/cctest/test_traced_value.cc',
+ 'test/cctest/test_util.cc',
+ 'test/cctest/test_dataqueue.cc',
+ ],
+ 'node_cctest_openssl_sources': [
+ 'test/cctest/test_crypto_clienthello.cc',
+ 'test/cctest/test_node_crypto.cc',
+ 'test/cctest/test_node_crypto_env.cc',
+ 'test/cctest/test_quic_cid.cc',
+ 'test/cctest/test_quic_tokens.cc',
+ ],
+ 'node_cctest_inspector_sources': [
+ 'test/cctest/test_inspector_socket.cc',
+ 'test/cctest/test_inspector_socket_server.cc',
+ ],
'node_mksnapshot_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)node_mksnapshot<(EXECUTABLE_SUFFIX)',
'node_js2c_exec': '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)node_js2c<(EXECUTABLE_SUFFIX)',
'conditions': [
@@ -1030,49 +1063,20 @@
'NODE_WANT_INTERNALS=1',
],
- 'sources': [
- 'src/node_snapshot_stub.cc',
- 'test/cctest/node_test_fixture.cc',
- 'test/cctest/node_test_fixture.h',
- 'test/cctest/test_aliased_buffer.cc',
- 'test/cctest/test_base64.cc',
- 'test/cctest/test_base_object_ptr.cc',
- 'test/cctest/test_cppgc.cc',
- 'test/cctest/test_node_postmortem_metadata.cc',
- 'test/cctest/test_environment.cc',
- 'test/cctest/test_linked_binding.cc',
- 'test/cctest/test_node_api.cc',
- 'test/cctest/test_per_process.cc',
- 'test/cctest/test_platform.cc',
- 'test/cctest/test_report.cc',
- 'test/cctest/test_json_utils.cc',
- 'test/cctest/test_sockaddr.cc',
- 'test/cctest/test_traced_value.cc',
- 'test/cctest/test_util.cc',
- 'test/cctest/test_dataqueue.cc',
- ],
+ 'sources': [ '<@(node_cctest_sources)' ],
'conditions': [
[ 'node_use_openssl=="true"', {
'defines': [
'HAVE_OPENSSL=1',
],
- 'sources': [
- 'test/cctest/test_crypto_clienthello.cc',
- 'test/cctest/test_node_crypto.cc',
- 'test/cctest/test_node_crypto_env.cc',
- 'test/cctest/test_quic_cid.cc',
- 'test/cctest/test_quic_tokens.cc',
- ]
+ 'sources': [ '<@(node_cctest_openssl_sources)' ],
}],
['v8_enable_inspector==1', {
- 'sources': [
- 'test/cctest/test_inspector_socket.cc',
- 'test/cctest/test_inspector_socket_server.cc'
- ],
'defines': [
'HAVE_INSPECTOR=1',
],
+ 'sources': [ '<@(node_cctest_inspector_sources)' ],
}, {
'defines': [
'HAVE_INSPECTOR=0',
@@ -1247,8 +1251,8 @@
],
'conditions': [
- ['OS in "linux mac"', {
- 'defines': [ 'NODE_MKSNAPSHOT_USE_STRING_LITERALS=1' ],
+ ['node_write_snapshot_as_array_literals=="true"', {
+ 'defines': [ 'NODE_MKSNAPSHOT_USE_ARRAY_LITERALS=1' ],
}],
[ 'node_use_openssl=="true"', {
'defines': [
diff --git a/onboarding.md b/onboarding.md
index 1e912f0c8a4379..be393a64f3fc67 100644
--- a/onboarding.md
+++ b/onboarding.md
@@ -10,7 +10,7 @@ onboarding session.
possible to add them to the organization if they are not using two-factor
authentication. If they cannot receive SMS messages from GitHub, try
[using a TOTP mobile app][].
-* Suggest the new Collaborator install [`node-core-utils`][] and
+* Suggest the new Collaborator install [`@node-core/utils`][] and
[set up the credentials][] for it.
## Fifteen minutes before the onboarding session
@@ -230,7 +230,7 @@ needs to be pointed out separately during the onboarding.
request.
* Be sure to add the `PR-URL: ` and appropriate `Reviewed-By:`
metadata.
- * [`node-core-utils`][] automates the generation of metadata and the landing
+ * [`@node-core/utils`][] automates the generation of metadata and the landing
process. See the documentation of [`git-node`][].
* [`core-validate-commit`][] automates the validation of commit messages.
This will be run during `git node land --final` of the [`git-node`][]
@@ -260,10 +260,10 @@ needs to be pointed out separately during the onboarding.
[Labels]: doc/contributing/collaborator-guide.md#labels
[Landing pull requests]: doc/contributing/collaborator-guide.md#landing-pull-requests
[Publicizing or hiding organization membership]: https://help.github.com/articles/publicizing-or-hiding-organization-membership/
+[`@node-core/utils`]: https://github.com/nodejs/node-core-utils
[`author-ready`]: doc/contributing/collaborator-guide.md#author-ready-pull-requests
[`core-validate-commit`]: https://github.com/nodejs/core-validate-commit
[`git-node`]: https://github.com/nodejs/node-core-utils/blob/HEAD/docs/git-node.md
-[`node-core-utils`]: https://github.com/nodejs/node-core-utils
[set up the credentials]: https://github.com/nodejs/node-core-utils#setting-up-github-credentials
[static-analysis]: doc/contributing/static-analysis.md
[two-factor authentication]: https://help.github.com/articles/securing-your-account-with-two-factor-authentication-2fa/
diff --git a/pyproject.toml b/pyproject.toml
index 6b51197ad66c2e..d0c3a056f2e92c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -20,6 +20,7 @@ select = [
exclude = [
"deps",
"tools/inspector_protocol",
+ "tools/node_modules",
]
ignore = [
"E401",
diff --git a/src/debug_utils.h b/src/debug_utils.h
index 31c929f122cd1f..280b4cb39c780a 100644
--- a/src/debug_utils.h
+++ b/src/debug_utils.h
@@ -51,6 +51,7 @@ void NODE_EXTERN_PRIVATE FWrite(FILE* file, const std::string& str);
V(SEA) \
V(WASI) \
V(MKSNAPSHOT) \
+ V(SNAPSHOT_SERDES) \
V(PERMISSION_MODEL)
enum class DebugCategory : unsigned int {
diff --git a/src/env_properties.h b/src/env_properties.h
index 970e25d926dbb2..24de82429ec892 100644
--- a/src/env_properties.h
+++ b/src/env_properties.h
@@ -54,6 +54,7 @@
V(args_string, "args") \
V(asn1curve_string, "asn1Curve") \
V(async_ids_stack_string, "async_ids_stack") \
+ V(base_string, "base") \
V(bits_string, "bits") \
V(block_list_string, "blockList") \
V(buffer_string, "buffer") \
diff --git a/src/module_wrap.cc b/src/module_wrap.cc
index 0127a09167f851..b96106a39744b9 100644
--- a/src/module_wrap.cc
+++ b/src/module_wrap.cc
@@ -52,16 +52,22 @@ using v8::Value;
ModuleWrap::ModuleWrap(Environment* env,
Local object,
Local module,
- Local url)
- : BaseObject(env, object),
- module_(env->isolate(), module),
- id_(env->get_next_module_id()) {
+ Local url,
+ Local context_object,
+ Local synthetic_evaluation_step)
+ : BaseObject(env, object),
+ module_(env->isolate(), module),
+ id_(env->get_next_module_id()) {
env->id_to_module_map.emplace(id_, this);
- Local undefined = Undefined(env->isolate());
object->SetInternalField(kURLSlot, url);
- object->SetInternalField(kSyntheticEvaluationStepsSlot, undefined);
- object->SetInternalField(kContextObjectSlot, undefined);
+ object->SetInternalField(kSyntheticEvaluationStepsSlot,
+ synthetic_evaluation_step);
+ object->SetInternalField(kContextObjectSlot, context_object);
+
+ if (!synthetic_evaluation_step->IsUndefined()) {
+ synthetic_ = true;
+ }
}
ModuleWrap::~ModuleWrap() {
@@ -79,7 +85,9 @@ ModuleWrap::~ModuleWrap() {
Local ModuleWrap::context() const {
Local obj = object()->GetInternalField(kContextObjectSlot).As();
- if (obj.IsEmpty()) return {};
+ // If this fails, there is likely a bug e.g. ModuleWrap::context() is accessed
+ // before the ModuleWrap constructor completes.
+ CHECK(obj->IsObject());
return obj.As()->GetCreationContext().ToLocalChecked();
}
@@ -227,18 +235,16 @@ void ModuleWrap::New(const FunctionCallbackInfo& args) {
return;
}
- ModuleWrap* obj = new ModuleWrap(env, that, module, url);
-
- if (synthetic) {
- obj->synthetic_ = true;
- obj->object()->SetInternalField(kSyntheticEvaluationStepsSlot, args[3]);
- }
-
// Use the extras object as an object whose GetCreationContext() will be the
// original `context`, since the `Context` itself strictly speaking cannot
// be stored in an internal field.
- obj->object()->SetInternalField(kContextObjectSlot,
- context->GetExtrasBindingObject());
+ Local context_object = context->GetExtrasBindingObject();
+ Local synthetic_evaluation_step =
+ synthetic ? args[3] : Undefined(env->isolate()).As();
+
+ ModuleWrap* obj = new ModuleWrap(
+ env, that, module, url, context_object, synthetic_evaluation_step);
+
obj->contextify_context_ = contextify_context;
env->hash_to_module_map.emplace(module->GetIdentityHash(), obj);
diff --git a/src/module_wrap.h b/src/module_wrap.h
index c609ba5509dcd0..a3d3386763af85 100644
--- a/src/module_wrap.h
+++ b/src/module_wrap.h
@@ -72,7 +72,9 @@ class ModuleWrap : public BaseObject {
ModuleWrap(Environment* env,
v8::Local object,
v8::Local module,
- v8::Local url);
+ v8::Local url,
+ v8::Local context_object,
+ v8::Local synthetic_evaluation_step);
~ModuleWrap() override;
static void New(const v8::FunctionCallbackInfo& args);
diff --git a/src/node.cc b/src/node.cc
index a6b829c0ff2e7f..89e0e5524c2102 100644
--- a/src/node.cc
+++ b/src/node.cc
@@ -838,6 +838,10 @@ static ExitCode InitializeNodeWithArgsInternal(
V8::SetFlagsFromString(NODE_V8_OPTIONS, sizeof(NODE_V8_OPTIONS) - 1);
#endif
+ // Specify this explicitly to avoid being affected by V8 changes to the
+ // default value.
+ V8::SetFlagsFromString("--rehash-snapshot");
+
HandleEnvOptions(per_process::cli_options->per_isolate->per_env);
std::string node_options;
@@ -912,9 +916,14 @@ static ExitCode InitializeNodeWithArgsInternal(
// Initialize ICU.
// If icu_data_dir is empty here, it will load the 'minimal' data.
- if (!i18n::InitializeICUDirectory(per_process::cli_options->icu_data_dir)) {
- errors->push_back("could not initialize ICU "
- "(check NODE_ICU_DATA or --icu-data-dir parameters)\n");
+ std::string icu_error;
+ if (!i18n::InitializeICUDirectory(per_process::cli_options->icu_data_dir,
+ &icu_error)) {
+ errors->push_back(icu_error +
+ ": Could not initialize ICU. "
+ "Check the directory specified by NODE_ICU_DATA or "
+ "--icu-data-dir contains " U_ICUDATA_NAME ".dat and "
+ "it's readable\n");
return ExitCode::kInvalidCommandLineArgument;
}
per_process::metadata.versions.InitializeIntlVersions();
diff --git a/src/node_api.cc b/src/node_api.cc
index 7537dc20b2bd82..368f05f3f4a261 100644
--- a/src/node_api.cc
+++ b/src/node_api.cc
@@ -82,9 +82,8 @@ void node_napi_env__::trigger_fatal_exception(v8::Local local_err) {
node::errors::TriggerUncaughtException(isolate, local_err, local_msg);
}
-// option enforceUncaughtExceptionPolicy is added for not breaking existing
-// running n-api add-ons, and should be deprecated in the next major Node.js
-// release.
+// The option enforceUncaughtExceptionPolicy is added for not breaking existing
+// running Node-API add-ons.
template
void node_napi_env__::CallbackIntoModule(T&& call) {
CallIntoModule(call, [](napi_env env_, v8::Local local_err) {
@@ -93,19 +92,24 @@ void node_napi_env__::CallbackIntoModule(T&& call) {
return;
}
node::Environment* node_env = env->node_env();
- if (!node_env->options()->force_node_api_uncaught_exceptions_policy &&
+ // If the module api version is less than NAPI_VERSION_EXPERIMENTAL,
+ // and the option --force-node-api-uncaught-exceptions-policy is not
+ // specified, emit a warning about the uncaught exception instead of
+ // triggering uncaught exception event.
+ if (env->module_api_version < NAPI_VERSION_EXPERIMENTAL &&
+ !node_env->options()->force_node_api_uncaught_exceptions_policy &&
!enforceUncaughtExceptionPolicy) {
ProcessEmitDeprecationWarning(
node_env,
"Uncaught N-API callback exception detected, please run node "
- "with option --force-node-api-uncaught-exceptions-policy=true"
+ "with option --force-node-api-uncaught-exceptions-policy=true "
"to handle those exceptions properly.",
"DEP0168");
return;
}
// If there was an unhandled exception in the complete callback,
// report it as a fatal exception. (There is no JavaScript on the
- // callstack that can possibly handle it.)
+ // call stack that can possibly handle it.)
env->trigger_fatal_exception(local_err);
});
}
diff --git a/src/node_dir.cc b/src/node_dir.cc
index 0bef2b8927639b..10cde6067899c7 100644
--- a/src/node_dir.cc
+++ b/src/node_dir.cc
@@ -53,7 +53,7 @@ static const char* get_dir_func_name_by_type(uv_fs_type req_type) {
FS_TYPE_TO_NAME(CLOSEDIR, "closedir")
#undef FS_TYPE_TO_NAME
default:
- return "unknow";
+ return "unknown";
}
}
@@ -397,6 +397,32 @@ static void OpenDir(const FunctionCallbackInfo& args) {
}
}
+static void OpenDirSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+ Isolate* isolate = env->isolate();
+
+ CHECK_GE(args.Length(), 1);
+
+ BufferValue path(isolate, args[0]);
+ CHECK_NOT_NULL(*path);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
+
+ uv_fs_t req;
+ auto make = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
+ FS_DIR_SYNC_TRACE_BEGIN(opendir);
+ int err = uv_fs_opendir(nullptr, &req, *path, nullptr);
+ FS_DIR_SYNC_TRACE_END(opendir);
+ if (err < 0) {
+ return env->ThrowUVException(err, "opendir");
+ }
+
+ uv_dir_t* dir = static_cast(req.ptr);
+ DirHandle* handle = DirHandle::New(env, dir);
+
+ args.GetReturnValue().Set(handle->object().As());
+}
+
void Initialize(Local target,
Local unused,
Local context,
@@ -405,6 +431,7 @@ void Initialize(Local target,
Isolate* isolate = env->isolate();
SetMethod(context, target, "opendir", OpenDir);
+ SetMethod(context, target, "opendirSync", OpenDirSync);
// Create FunctionTemplate for DirHandle
Local dir = NewFunctionTemplate(isolate, DirHandle::New);
@@ -419,6 +446,7 @@ void Initialize(Local target,
void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
registry->Register(OpenDir);
+ registry->Register(OpenDirSync);
registry->Register(DirHandle::New);
registry->Register(DirHandle::Read);
registry->Register(DirHandle::Close);
diff --git a/src/node_file.cc b/src/node_file.cc
index 285e532f0078e1..59780dec1c4b6d 100644
--- a/src/node_file.cc
+++ b/src/node_file.cc
@@ -157,7 +157,7 @@ static const char* get_fs_func_name_by_type(uv_fs_type req_type) {
FS_TYPE_TO_NAME(LUTIME, "lutime")
#undef FS_TYPE_TO_NAME
default:
- return "unknow";
+ return "unknown";
}
}
@@ -457,8 +457,7 @@ MaybeLocal FileHandle::ClosePromise() {
Local close_resolver =
object()->GetInternalField(FileHandle::kClosingPromiseSlot).As();
- if (!close_resolver.IsEmpty() && !close_resolver->IsUndefined()) {
- CHECK(close_resolver->IsPromise());
+ if (close_resolver->IsPromise()) {
return close_resolver.As();
}
@@ -997,6 +996,31 @@ void Access(const FunctionCallbackInfo& args) {
}
}
+static void AccessSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+ Isolate* isolate = env->isolate();
+
+ const int argc = args.Length();
+ CHECK_GE(argc, 2);
+
+ CHECK(args[1]->IsInt32());
+ int mode = args[1].As()->Value();
+
+ BufferValue path(isolate, args[0]);
+ CHECK_NOT_NULL(*path);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
+
+ uv_fs_t req;
+ FS_SYNC_TRACE_BEGIN(access);
+ int err = uv_fs_access(nullptr, &req, *path, mode, nullptr);
+ uv_fs_req_cleanup(&req);
+ FS_SYNC_TRACE_END(access);
+
+ if (err) {
+ return env->ThrowUVException(err, "access", nullptr, path.out());
+ }
+}
void Close(const FunctionCallbackInfo& args) {
Environment* env = Environment::GetCurrent(args);
@@ -1022,6 +1046,54 @@ void Close(const FunctionCallbackInfo& args) {
}
}
+static void CloseSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+ CHECK_GE(args.Length(), 1);
+ CHECK(args[0]->IsInt32());
+
+ int fd = args[0].As()->Value();
+ env->RemoveUnmanagedFd(fd);
+
+ uv_fs_t req;
+ FS_SYNC_TRACE_BEGIN(close);
+ int err = uv_fs_close(nullptr, &req, fd, nullptr);
+ FS_SYNC_TRACE_END(close);
+ uv_fs_req_cleanup(&req);
+
+ if (err < 0) {
+ return env->ThrowUVException(err, "close");
+ }
+}
+
+static void ExistsSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+ Isolate* isolate = env->isolate();
+ CHECK_GE(args.Length(), 1);
+
+ BufferValue path(isolate, args[0]);
+ CHECK_NOT_NULL(*path);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
+
+ uv_fs_t req;
+ auto make = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
+ FS_SYNC_TRACE_BEGIN(access);
+ int err = uv_fs_access(nullptr, &req, path.out(), 0, nullptr);
+ FS_SYNC_TRACE_END(access);
+
+#ifdef _WIN32
+ // In case of an invalid symlink, `uv_fs_access` on win32
+ // will **not** return an error and is therefore not enough.
+ // Double check with `uv_fs_stat()`.
+ if (err == 0) {
+ FS_SYNC_TRACE_BEGIN(stat);
+ err = uv_fs_stat(nullptr, &req, path.out(), nullptr);
+ FS_SYNC_TRACE_END(stat);
+ }
+#endif // _WIN32
+
+ args.GetReturnValue().Set(err == 0);
+}
// Used to speed up module loading. Returns an array [string, boolean]
static void InternalModuleReadJSON(const FunctionCallbackInfo& args) {
@@ -1178,6 +1250,41 @@ static void Stat(const FunctionCallbackInfo& args) {
}
}
+static void StatSync(const FunctionCallbackInfo& args) {
+ Realm* realm = Realm::GetCurrent(args);
+ BindingData* binding_data = realm->GetBindingData();
+ Environment* env = realm->env();
+
+ CHECK_GE(args.Length(), 3);
+
+ BufferValue path(realm->isolate(), args[0]);
+ bool use_bigint = args[1]->IsTrue();
+ bool do_not_throw_if_no_entry = args[2]->IsFalse();
+ CHECK_NOT_NULL(*path);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
+
+ env->PrintSyncTrace();
+
+ uv_fs_t req;
+ auto make = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
+
+ FS_SYNC_TRACE_BEGIN(stat);
+ int err = uv_fs_stat(nullptr, &req, *path, nullptr);
+ FS_SYNC_TRACE_END(stat);
+
+ if (err < 0) {
+ if (err == UV_ENOENT && do_not_throw_if_no_entry) {
+ return;
+ }
+ return env->ThrowUVException(err, "stat", nullptr, path.out());
+ }
+
+ Local arr = FillGlobalStatsArray(
+ binding_data, use_bigint, static_cast(req.ptr));
+ args.GetReturnValue().Set(arr);
+}
+
static void LStat(const FunctionCallbackInfo& args) {
Realm* realm = Realm::GetCurrent(args);
BindingData* binding_data = realm->GetBindingData();
@@ -1291,6 +1398,34 @@ static void StatFs(const FunctionCallbackInfo& args) {
}
}
+static void StatFsSync(const FunctionCallbackInfo& args) {
+ Realm* realm = Realm::GetCurrent(args);
+ BindingData* binding_data = realm->GetBindingData();
+ Environment* env = realm->env();
+
+ CHECK_GE(args.Length(), 2);
+
+ BufferValue path(realm->isolate(), args[0]);
+ bool use_bigint = args[1]->IsTrue();
+
+ CHECK_NOT_NULL(*path);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemRead, path.ToStringView());
+
+ uv_fs_t req;
+ auto make = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
+ FS_SYNC_TRACE_BEGIN(statfs);
+ int err = uv_fs_statfs(nullptr, &req, *path, nullptr);
+ FS_SYNC_TRACE_END(statfs);
+ if (err < 0) {
+ return env->ThrowUVException(err, "statfs", *path, nullptr);
+ }
+
+ Local arr = FillGlobalStatFsArray(
+ binding_data, use_bigint, static_cast(req.ptr));
+ args.GetReturnValue().Set(arr);
+}
+
static void Symlink(const FunctionCallbackInfo& args) {
Environment* env = Environment::GetCurrent(args);
Isolate* isolate = env->isolate();
@@ -1567,6 +1702,27 @@ static void Unlink(const FunctionCallbackInfo& args) {
}
}
+static void UnlinkSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+
+ const int argc = args.Length();
+ CHECK_GE(argc, 1);
+
+ BufferValue path(env->isolate(), args[0]);
+ CHECK_NOT_NULL(*path);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemWrite, path.ToStringView());
+
+ uv_fs_t req;
+ auto make = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
+ FS_SYNC_TRACE_BEGIN(unlink);
+ int err = uv_fs_unlink(nullptr, &req, *path, nullptr);
+ FS_SYNC_TRACE_END(unlink);
+ if (err < 0) {
+ return env->ThrowUVException(err, "unlink", nullptr, *path);
+ }
+}
+
static void RMDir(const FunctionCallbackInfo& args) {
Environment* env = Environment::GetCurrent(args);
@@ -1981,7 +2137,6 @@ static inline Maybe CheckOpenPermissions(Environment* env,
const int write_as_side_effect = flags & (UV_FS_O_APPEND | UV_FS_O_CREAT |
UV_FS_O_TRUNC | UV_FS_O_TEMPORARY);
- // TODO(rafaelgss): it can be optimized to avoid two permission checks
auto pathView = path.ToStringView();
if (rwflags != UV_FS_O_WRONLY) {
THROW_IF_INSUFFICIENT_PERMISSIONS(
@@ -2000,74 +2155,6 @@ static inline Maybe CheckOpenPermissions(Environment* env,
return JustVoid();
}
-static void ReadFileSync(const FunctionCallbackInfo& args) {
- Environment* env = Environment::GetCurrent(args);
- auto isolate = env->isolate();
-
- CHECK_GE(args.Length(), 2);
-
- BufferValue path(env->isolate(), args[0]);
- CHECK_NOT_NULL(*path);
-
- CHECK(args[1]->IsInt32());
- const int flags = args[1].As()->Value();
-
- if (CheckOpenPermissions(env, path, flags).IsNothing()) return;
-
- uv_fs_t req;
- auto defer_req_cleanup = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
-
- FS_SYNC_TRACE_BEGIN(open);
- uv_file file = uv_fs_open(nullptr, &req, *path, flags, 438, nullptr);
- FS_SYNC_TRACE_END(open);
- if (req.result < 0) {
- // req will be cleaned up by scope leave.
- Local out[] = {
- Integer::New(isolate, req.result), // errno
- FIXED_ONE_BYTE_STRING(isolate, "open"), // syscall
- };
- return args.GetReturnValue().Set(Array::New(isolate, out, arraysize(out)));
- }
- uv_fs_req_cleanup(&req);
-
- auto defer_close = OnScopeLeave([file]() {
- uv_fs_t close_req;
- CHECK_EQ(0, uv_fs_close(nullptr, &close_req, file, nullptr));
- uv_fs_req_cleanup(&close_req);
- });
-
- std::string result{};
- char buffer[8192];
- uv_buf_t buf = uv_buf_init(buffer, sizeof(buffer));
-
- FS_SYNC_TRACE_BEGIN(read);
- while (true) {
- auto r = uv_fs_read(nullptr, &req, file, &buf, 1, -1, nullptr);
- if (req.result < 0) {
- FS_SYNC_TRACE_END(read);
- // req will be cleaned up by scope leave.
- Local out[] = {
- Integer::New(isolate, req.result), // errno
- FIXED_ONE_BYTE_STRING(isolate, "read"), // syscall
- };
- return args.GetReturnValue().Set(
- Array::New(isolate, out, arraysize(out)));
- }
- uv_fs_req_cleanup(&req);
- if (r <= 0) {
- break;
- }
- result.append(buf.base, r);
- }
- FS_SYNC_TRACE_END(read);
-
- args.GetReturnValue().Set(String::NewFromUtf8(env->isolate(),
- result.data(),
- v8::NewStringType::kNormal,
- result.size())
- .ToLocalChecked());
-}
-
static void Open(const FunctionCallbackInfo& args) {
Environment* env = Environment::GetCurrent(args);
@@ -2104,6 +2191,35 @@ static void Open(const FunctionCallbackInfo& args) {
}
}
+static void OpenSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+
+ const int argc = args.Length();
+ CHECK_GE(argc, 3);
+
+ BufferValue path(env->isolate(), args[0]);
+ CHECK_NOT_NULL(*path);
+
+ CHECK(args[1]->IsInt32());
+ const int flags = args[1].As()->Value();
+
+ CHECK(args[2]->IsInt32());
+ const int mode = args[2].As()->Value();
+
+ if (CheckOpenPermissions(env, path, flags).IsNothing()) return;
+
+ uv_fs_t req;
+ auto make = OnScopeLeave([&req]() { uv_fs_req_cleanup(&req); });
+ FS_SYNC_TRACE_BEGIN(open);
+ auto err = uv_fs_open(nullptr, &req, *path, flags, mode, nullptr);
+ FS_SYNC_TRACE_END(open);
+ if (err < 0) {
+ return env->ThrowUVException(err, "open", nullptr, path.out());
+ }
+ env->AddUnmanagedFd(err);
+ args.GetReturnValue().Set(err);
+}
+
static void OpenFileHandle(const FunctionCallbackInfo& args) {
Realm* realm = Realm::GetCurrent(args);
BindingData* binding_data = realm->GetBindingData();
@@ -2186,6 +2302,38 @@ static void CopyFile(const FunctionCallbackInfo& args) {
}
}
+static void CopyFileSync(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+ Isolate* isolate = env->isolate();
+
+ const int argc = args.Length();
+ CHECK_GE(argc, 3);
+
+ BufferValue src(isolate, args[0]);
+ CHECK_NOT_NULL(*src);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemRead, src.ToStringView());
+
+ BufferValue dest(isolate, args[1]);
+ CHECK_NOT_NULL(*dest);
+ THROW_IF_INSUFFICIENT_PERMISSIONS(
+ env, permission::PermissionScope::kFileSystemWrite, dest.ToStringView());
+
+ CHECK(args[2]->IsInt32());
+ const int flags = args[2].As()->Value();
+
+ uv_fs_t req;
+ FS_SYNC_TRACE_BEGIN(copyfile);
+ int err =
+ uv_fs_copyfile(nullptr, &req, src.out(), dest.out(), flags, nullptr);
+ uv_fs_req_cleanup(&req);
+ FS_SYNC_TRACE_END(copyfile);
+
+ if (err) {
+ return env->ThrowUVException(
+ err, "copyfile", nullptr, src.out(), dest.out());
+ }
+}
// Wrapper for write(2).
//
@@ -2448,6 +2596,69 @@ static void Read(const FunctionCallbackInfo& args) {
}
}
+static void ReadFileUtf8(const FunctionCallbackInfo& args) {
+ Environment* env = Environment::GetCurrent(args);
+ auto isolate = env->isolate();
+
+ CHECK_GE(args.Length(), 2);
+
+ CHECK(args[1]->IsInt32());
+ const int flags = args[1].As()->Value();
+
+ uv_file file;
+ uv_fs_t req;
+
+ bool is_fd = args[0]->IsInt32();
+
+ // Check for file descriptor
+ if (is_fd) {
+ file = args[0].As()->Value();
+ } else {
+ BufferValue path(env->isolate(), args[0]);
+ CHECK_NOT_NULL(*path);
+ if (CheckOpenPermissions(env, path, flags).IsNothing()) return;
+
+ FS_SYNC_TRACE_BEGIN(open);
+ file = uv_fs_open(nullptr, &req, *path, flags, O_RDONLY, nullptr);
+ FS_SYNC_TRACE_END(open);
+ if (req.result < 0) {
+ uv_fs_req_cleanup(&req);
+ // req will be cleaned up by scope leave.
+ return env->ThrowUVException(req.result, "open", nullptr, path.out());
+ }
+ }
+
+ auto defer_close = OnScopeLeave([file, is_fd, &req]() {
+ if (!is_fd) {
+ FS_SYNC_TRACE_BEGIN(close);
+ CHECK_EQ(0, uv_fs_close(nullptr, &req, file, nullptr));
+ FS_SYNC_TRACE_END(close);
+ }
+ uv_fs_req_cleanup(&req);
+ });
+
+ std::string result{};
+ char buffer[8192];
+ uv_buf_t buf = uv_buf_init(buffer, sizeof(buffer));
+
+ FS_SYNC_TRACE_BEGIN(read);
+ while (true) {
+ auto r = uv_fs_read(nullptr, &req, file, &buf, 1, -1, nullptr);
+ if (req.result < 0) {
+ FS_SYNC_TRACE_END(read);
+ // req will be cleaned up by scope leave.
+ return env->ThrowUVException(req.result, "read", nullptr);
+ }
+ if (r <= 0) {
+ break;
+ }
+ result.append(buf.base, r);
+ }
+ FS_SYNC_TRACE_END(read);
+
+ args.GetReturnValue().Set(
+ ToV8Value(env->context(), result, isolate).ToLocalChecked());
+}
// Wrapper for readv(2).
//
@@ -2560,7 +2771,6 @@ static void FChmod(const FunctionCallbackInfo& args) {
}
}
-
/* fs.chown(path, uid, gid);
* Wrapper for chown(1) / EIO_CHOWN
*/
@@ -3207,10 +3417,15 @@ static void CreatePerIsolateProperties(IsolateData* isolate_data,
Isolate* isolate = isolate_data->isolate();
SetMethod(isolate, target, "access", Access);
+ SetMethod(isolate, target, "accessSync", AccessSync);
SetMethod(isolate, target, "close", Close);
+ SetMethod(isolate, target, "closeSync", CloseSync);
+ SetMethod(isolate, target, "existsSync", ExistsSync);
SetMethod(isolate, target, "open", Open);
+ SetMethod(isolate, target, "openSync", OpenSync);
SetMethod(isolate, target, "openFileHandle", OpenFileHandle);
SetMethod(isolate, target, "read", Read);
+ SetMethod(isolate, target, "readFileUtf8", ReadFileUtf8);
SetMethod(isolate, target, "readBuffers", ReadBuffers);
SetMethod(isolate, target, "fdatasync", Fdatasync);
SetMethod(isolate, target, "fsync", Fsync);
@@ -3222,19 +3437,22 @@ static void CreatePerIsolateProperties(IsolateData* isolate_data,
SetMethod(isolate, target, "internalModuleReadJSON", InternalModuleReadJSON);
SetMethod(isolate, target, "internalModuleStat", InternalModuleStat);
SetMethod(isolate, target, "stat", Stat);
+ SetMethod(isolate, target, "statSync", StatSync);
SetMethod(isolate, target, "lstat", LStat);
SetMethod(isolate, target, "fstat", FStat);
- SetMethodNoSideEffect(isolate, target, "readFileSync", ReadFileSync);
SetMethod(isolate, target, "statfs", StatFs);
+ SetMethod(isolate, target, "statfsSync", StatFsSync);
SetMethod(isolate, target, "link", Link);
SetMethod(isolate, target, "symlink", Symlink);
SetMethod(isolate, target, "readlink", ReadLink);
SetMethod(isolate, target, "unlink", Unlink);
+ SetMethod(isolate, target, "unlinkSync", UnlinkSync);
SetMethod(isolate, target, "writeBuffer", WriteBuffer);
SetMethod(isolate, target, "writeBuffers", WriteBuffers);
SetMethod(isolate, target, "writeString", WriteString);
SetMethod(isolate, target, "realpath", RealPath);
SetMethod(isolate, target, "copyFile", CopyFile);
+ SetMethod(isolate, target, "copyFileSync", CopyFileSync);
SetMethod(isolate, target, "chmod", Chmod);
SetMethod(isolate, target, "fchmod", FChmod);
@@ -3322,13 +3540,18 @@ BindingData* FSReqBase::binding_data() {
void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
registry->Register(Access);
+ registry->Register(AccessSync);
StatWatcher::RegisterExternalReferences(registry);
BindingData::RegisterExternalReferences(registry);
registry->Register(Close);
+ registry->Register(CloseSync);
+ registry->Register(ExistsSync);
registry->Register(Open);
+ registry->Register(OpenSync);
registry->Register(OpenFileHandle);
registry->Register(Read);
+ registry->Register(ReadFileUtf8);
registry->Register(ReadBuffers);
registry->Register(Fdatasync);
registry->Register(Fsync);
@@ -3340,19 +3563,22 @@ void RegisterExternalReferences(ExternalReferenceRegistry* registry) {
registry->Register(InternalModuleReadJSON);
registry->Register(InternalModuleStat);
registry->Register(Stat);
+ registry->Register(StatSync);
registry->Register(LStat);
registry->Register(FStat);
- registry->Register(ReadFileSync);
registry->Register(StatFs);
+ registry->Register(StatFsSync);
registry->Register(Link);
registry->Register(Symlink);
registry->Register(ReadLink);
registry->Register(Unlink);
+ registry->Register(UnlinkSync);
registry->Register(WriteBuffer);
registry->Register(WriteBuffers);
registry->Register(WriteString);
registry->Register(RealPath);
registry->Register(CopyFile);
+ registry->Register(CopyFileSync);
registry->Register(Chmod);
registry->Register(FChmod);
diff --git a/src/node_i18n.cc b/src/node_i18n.cc
index 372df8d029fc4f..d45325954d9807 100644
--- a/src/node_i18n.cc
+++ b/src/node_i18n.cc
@@ -54,20 +54,21 @@
#include "util-inl.h"
#include "v8.h"
-#include
#include
+#include
#include
#include
+#include
#include
#include
-#include
-#include
-#include
-#include
#include
+#include
+#include
+#include
+#include
+#include
#include
#include
-#include
#ifdef NODE_HAVE_SMALL_ICU
/* if this is defined, we have a 'secondary' entry point.
@@ -569,8 +570,7 @@ ConverterObject::ConverterObject(
}
}
-
-bool InitializeICUDirectory(const std::string& path) {
+bool InitializeICUDirectory(const std::string& path, std::string* error) {
UErrorCode status = U_ZERO_ERROR;
if (path.empty()) {
#ifdef NODE_HAVE_SMALL_ICU
@@ -583,7 +583,12 @@ bool InitializeICUDirectory(const std::string& path) {
u_setDataDirectory(path.c_str());
u_init(&status);
}
- return status == U_ZERO_ERROR;
+ if (status == U_ZERO_ERROR) {
+ return true;
+ }
+
+ *error = u_errorName(status);
+ return false;
}
void SetDefaultTimeZone(const char* tzid) {
diff --git a/src/node_i18n.h b/src/node_i18n.h
index f32ade831b17a0..e516282865fb18 100644
--- a/src/node_i18n.h
+++ b/src/node_i18n.h
@@ -38,7 +38,7 @@
namespace node {
namespace i18n {
-bool InitializeICUDirectory(const std::string& path);
+bool InitializeICUDirectory(const std::string& path, std::string* error);
void SetDefaultTimeZone(const char* tzid);
diff --git a/src/node_options.cc b/src/node_options.cc
index 6ea85e3399be69..b544f1209143c0 100644
--- a/src/node_options.cc
+++ b/src/node_options.cc
@@ -173,7 +173,7 @@ void EnvironmentOptions::CheckOptions(std::vector* errors,
} else if (force_repl) {
errors->push_back("either --watch or --interactive "
"can be used, not both");
- } else if (argv->size() < 1 || (*argv)[1].empty()) {
+ } else if (!test_runner && (argv->size() < 1 || (*argv)[1].empty())) {
errors->push_back("--watch requires specifying a file");
}
diff --git a/src/node_snapshot_builder.h b/src/node_snapshot_builder.h
index 22d37242c96d22..66768cfd201b5e 100644
--- a/src/node_snapshot_builder.h
+++ b/src/node_snapshot_builder.h
@@ -23,7 +23,7 @@ class NODE_EXTERN_PRIVATE SnapshotBuilder {
const std::vector& args,
const std::vector& exec_args,
std::optional main_script_path = std::nullopt,
- bool use_string_literals = true);
+ bool use_array_literals = false);
// Generate the snapshot into out.
static ExitCode Generate(SnapshotData* out,
diff --git a/src/node_snapshotable.cc b/src/node_snapshotable.cc
index 1d93f846a1a981..562a47ddcc9c8e 100644
--- a/src/node_snapshotable.cc
+++ b/src/node_snapshotable.cc
@@ -145,7 +145,8 @@ class SnapshotDeserializer : public BlobDeserializer {
public:
explicit SnapshotDeserializer(std::string_view v)
: BlobDeserializer(
- per_process::enabled_debug_list.enabled(DebugCategory::MKSNAPSHOT),
+ per_process::enabled_debug_list.enabled(
+ DebugCategory::SNAPSHOT_SERDES),
v) {}
template {
SnapshotSerializer()
: BlobSerializer(
per_process::enabled_debug_list.enabled(
- DebugCategory::MKSNAPSHOT)) {
+ DebugCategory::SNAPSHOT_SERDES)) {
// Currently the snapshot blob built with an empty script is around 4MB.
// So use that as the default sink size.
sink.reserve(4 * 1024 * 1024);
@@ -773,11 +774,11 @@ void WriteByteVectorLiteral(std::ostream* ss,
const T* vec,
size_t size,
const char* var_name,
- bool use_string_literals) {
+ bool use_array_literals) {
constexpr bool is_uint8_t = std::is_same_v;
static_assert(is_uint8_t || std::is_same_v);
constexpr const char* type_name = is_uint8_t ? "uint8_t" : "char";
- if (use_string_literals) {
+ if (!use_array_literals) {
const uint8_t* data = reinterpret_cast(vec);
*ss << "static const " << type_name << " *" << var_name << " = ";
*ss << (is_uint8_t ? R"(reinterpret_cast(")" : "\"");
@@ -818,7 +819,7 @@ static void WriteCodeCacheInitializer(std::ostream* ss,
void FormatBlob(std::ostream& ss,
const SnapshotData* data,
- bool use_string_literals) {
+ bool use_array_literals) {
ss << R"(#include
#include "env.h"
#include "node_snapshot_builder.h"
@@ -833,7 +834,7 @@ namespace node {
data->v8_snapshot_blob_data.data,
data->v8_snapshot_blob_data.raw_size,
"v8_snapshot_blob_data",
- use_string_literals);
+ use_array_literals);
ss << R"(static const int v8_snapshot_blob_size = )"
<< data->v8_snapshot_blob_data.raw_size << ";\n";
@@ -846,7 +847,7 @@ namespace node {
item.data.data,
item.data.length,
var_name.c_str(),
- use_string_literals);
+ use_array_literals);
}
ss << R"(const SnapshotData snapshot_data {
@@ -975,21 +976,8 @@ ExitCode BuildSnapshotWithoutCodeCache(
ExitCode BuildCodeCacheFromSnapshot(SnapshotData* out,
const std::vector& args,
const std::vector& exec_args) {
- std::vector errors;
- auto data_wrapper = out->AsEmbedderWrapper();
- auto setup = CommonEnvironmentSetup::CreateFromSnapshot(
- per_process::v8_platform.Platform(),
- &errors,
- data_wrapper.get(),
- args,
- exec_args);
- if (!setup) {
- for (const auto& err : errors)
- fprintf(stderr, "%s: %s\n", args[0].c_str(), err.c_str());
- return ExitCode::kBootstrapFailure;
- }
-
- Isolate* isolate = setup->isolate();
+ RAIIIsolate raii_isolate(out);
+ Isolate* isolate = raii_isolate.get();
v8::Locker locker(isolate);
Isolate::Scope isolate_scope(isolate);
HandleScope handle_scope(isolate);
@@ -1002,12 +990,14 @@ ExitCode BuildCodeCacheFromSnapshot(SnapshotData* out,
}
});
- Environment* env = setup->env();
+ Local context = Context::New(isolate);
+ Context::Scope context_scope(context);
+ builtins::BuiltinLoader builtin_loader;
// Regenerate all the code cache.
- if (!env->builtin_loader()->CompileAllBuiltins(setup->context())) {
+ if (!builtin_loader.CompileAllBuiltins(context)) {
return ExitCode::kGenericUserError;
}
- env->builtin_loader()->CopyCodeCache(&(out->code_cache));
+ builtin_loader.CopyCodeCache(&(out->code_cache));
if (per_process::enabled_debug_list.enabled(DebugCategory::MKSNAPSHOT)) {
for (const auto& item : out->code_cache) {
std::string size_str = FormatSize(item.data.length);
@@ -1143,7 +1133,7 @@ ExitCode SnapshotBuilder::GenerateAsSource(
const std::vector& args,
const std::vector& exec_args,
std::optional main_script_path,
- bool use_string_literals) {
+ bool use_array_literals) {
std::string main_script_content;
std::optional main_script_optional;
if (main_script_path.has_value()) {
@@ -1170,7 +1160,7 @@ ExitCode SnapshotBuilder::GenerateAsSource(
if (exit_code != ExitCode::kNoFailure) {
return exit_code;
}
- FormatBlob(out, &data, use_string_literals);
+ FormatBlob(out, &data, use_array_literals);
if (!out) {
std::cerr << "Failed to write to " << out_path << "\n";
diff --git a/src/node_url.cc b/src/node_url.cc
index 666492ca47cee3..89fcfec20f5685 100644
--- a/src/node_url.cc
+++ b/src/node_url.cc
@@ -227,6 +227,35 @@ void BindingData::Format(const FunctionCallbackInfo& args) {
.ToLocalChecked());
}
+void BindingData::ThrowInvalidURL(node::Environment* env,
+ std::string_view input,
+ std::optional base) {
+ Local err = ERR_INVALID_URL(env->isolate(), "Invalid URL");
+ DCHECK(err->IsObject());
+
+ auto err_object = err.As();
+
+ USE(err_object->Set(env->context(),
+ env->input_string(),
+ v8::String::NewFromUtf8(env->isolate(),
+ input.data(),
+ v8::NewStringType::kNormal,
+ input.size())
+ .ToLocalChecked()));
+
+ if (base.has_value()) {
+ USE(err_object->Set(env->context(),
+ env->base_string(),
+ v8::String::NewFromUtf8(env->isolate(),
+ base.value().c_str(),
+ v8::NewStringType::kNormal,
+ base.value().size())
+ .ToLocalChecked()));
+ }
+
+ env->isolate()->ThrowException(err);
+}
+
void BindingData::Parse(const FunctionCallbackInfo& args) {
CHECK_GE(args.Length(), 1);
CHECK(args[0]->IsString()); // input
@@ -235,15 +264,16 @@ void BindingData::Parse(const FunctionCallbackInfo& args) {
Realm* realm = Realm::GetCurrent(args);
BindingData* binding_data = realm->GetBindingData();
Isolate* isolate = realm->isolate();
+ std::optional base_{};
Utf8Value input(isolate, args[0]);
ada::result base;
ada::url_aggregator* base_pointer = nullptr;
if (args[1]->IsString()) {
- base =
- ada::parse(Utf8Value(isolate, args[1]).ToString());
+ base_ = Utf8Value(isolate, args[1]).ToString();
+ base = ada::parse(*base_);
if (!base) {
- return args.GetReturnValue().Set(false);
+ return ThrowInvalidURL(realm->env(), input.ToStringView(), base_);
}
base_pointer = &base.value();
}
@@ -251,7 +281,7 @@ void BindingData::Parse(const FunctionCallbackInfo& args) {
ada::parse(input.ToStringView(), base_pointer);
if (!out) {
- return args.GetReturnValue().Set(false);
+ return ThrowInvalidURL(realm->env(), input.ToStringView(), base_);
}
binding_data->UpdateComponents(out->get_components(), out->type);
diff --git a/src/node_url.h b/src/node_url.h
index c485caa2eb0343..f3aa136a5b538d 100644
--- a/src/node_url.h
+++ b/src/node_url.h
@@ -76,6 +76,9 @@ class BindingData : public SnapshotableObject {
const ada::scheme::type type);
static v8::CFunction fast_can_parse_methods_[];
+ static void ThrowInvalidURL(Environment* env,
+ std::string_view input,
+ std::optional base);
};
std::string FromFilePath(const std::string_view file_path);
diff --git a/src/node_version.h b/src/node_version.h
index 4164eef7cc0241..78b057a6c1023b 100644
--- a/src/node_version.h
+++ b/src/node_version.h
@@ -29,7 +29,7 @@
#define NODE_VERSION_IS_LTS 0
#define NODE_VERSION_LTS_CODENAME ""
-#define NODE_VERSION_IS_RELEASE 0
+#define NODE_VERSION_IS_RELEASE 1
#ifndef NODE_STRINGIFY
#define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n)
@@ -83,13 +83,20 @@
* We will, at times update the version of V8 shipped in the release line
* if it can be made ABI compatible with the previous version.
*
+ * Embedders building Node.js can define NODE_EMBEDDER_MODULE_VERSION to
+ * override the default value of NODE_MODULE_VERSION.
+ *
* The registry of used NODE_MODULE_VERSION numbers is located at
* https://github.com/nodejs/node/blob/HEAD/doc/abi_version_registry.json
* Extenders, embedders and other consumers of Node.js that require ABI
* version matching should open a pull request to reserve a number in this
* registry.
*/
+#if defined(NODE_EMBEDDER_MODULE_VERSION)
+#define NODE_MODULE_VERSION NODE_EMBEDDER_MODULE_VERSION
+#else
#define NODE_MODULE_VERSION 115
+#endif
// The NAPI_VERSION provided by this version of the runtime. This is the version
// which the Node binary being built supports.
diff --git a/src/pipe_wrap.cc b/src/pipe_wrap.cc
index 944d7c3e72c534..738a51a140d0af 100644
--- a/src/pipe_wrap.cc
+++ b/src/pipe_wrap.cc
@@ -62,7 +62,6 @@ MaybeLocal PipeWrap::Instantiate(Environment* env,
constructor->NewInstance(env->context(), 1, &type_value));
}
-
void PipeWrap::Initialize(Local target,
Local unused,
Local context,
@@ -71,8 +70,7 @@ void PipeWrap::Initialize(Local target,
Isolate* isolate = env->isolate();
Local t = NewFunctionTemplate(isolate, New);
- t->InstanceTemplate()
- ->SetInternalFieldCount(StreamBase::kInternalFieldCount);
+ t->InstanceTemplate()->SetInternalFieldCount(StreamBase::kInternalFieldCount);
t->Inherit(LibuvStreamWrap::GetConstructorTemplate(env));
@@ -102,9 +100,7 @@ void PipeWrap::Initialize(Local target,
NODE_DEFINE_CONSTANT(constants, IPC);
NODE_DEFINE_CONSTANT(constants, UV_READABLE);
NODE_DEFINE_CONSTANT(constants, UV_WRITABLE);
- target->Set(context,
- env->constants_string(),
- constants).Check();
+ target->Set(context, env->constants_string(), constants).Check();
}
void PipeWrap::RegisterExternalReferences(ExternalReferenceRegistry* registry) {
@@ -152,7 +148,6 @@ void PipeWrap::New(const FunctionCallbackInfo& args) {
new PipeWrap(env, args.This(), provider, ipc);
}
-
PipeWrap::PipeWrap(Environment* env,
Local object,
ProviderType provider,
@@ -163,16 +158,14 @@ PipeWrap::PipeWrap(Environment* env,
// Suggestion: uv_pipe_init() returns void.
}
-
void PipeWrap::Bind(const FunctionCallbackInfo& args) {
PipeWrap* wrap;
ASSIGN_OR_RETURN_UNWRAP(&wrap, args.Holder());
node::Utf8Value name(args.GetIsolate(), args[0]);
- int err = uv_pipe_bind(&wrap->handle_, *name);
+ int err = uv_pipe_bind2(&wrap->handle_, *name, name.length(), 0);
args.GetReturnValue().Set(err);
}
-
#ifdef _WIN32
void PipeWrap::SetPendingInstances(const FunctionCallbackInfo& args) {
PipeWrap* wrap;
@@ -183,7 +176,6 @@ void PipeWrap::SetPendingInstances(const FunctionCallbackInfo& args) {
}
#endif
-
void PipeWrap::Fchmod(const v8::FunctionCallbackInfo& args) {
PipeWrap* wrap;
ASSIGN_OR_RETURN_UNWRAP(&wrap, args.Holder());
@@ -193,20 +185,17 @@ void PipeWrap::Fchmod(const v8::FunctionCallbackInfo& args) {
args.GetReturnValue().Set(err);
}
-
void PipeWrap::Listen(const FunctionCallbackInfo& args) {
PipeWrap* wrap;
ASSIGN_OR_RETURN_UNWRAP(&wrap, args.Holder());
Environment* env = wrap->env();
int backlog;
if (!args[0]->Int32Value(env->context()).To(&backlog)) return;
- int err = uv_listen(reinterpret_cast(&wrap->handle_),
- backlog,
- OnConnection);
+ int err = uv_listen(
+ reinterpret_cast(&wrap->handle_), backlog, OnConnection);
args.GetReturnValue().Set(err);
}
-
void PipeWrap::Open(const FunctionCallbackInfo& args) {
Environment* env = Environment::GetCurrent(args);
@@ -222,7 +211,6 @@ void PipeWrap::Open(const FunctionCallbackInfo& args) {
args.GetReturnValue().Set(err);
}
-
void PipeWrap::Connect(const FunctionCallbackInfo& args) {
Environment* env = Environment::GetCurrent(args);
@@ -237,10 +225,8 @@ void PipeWrap::Connect(const FunctionCallbackInfo& args) {
ConnectWrap* req_wrap =
new ConnectWrap(env, req_wrap_obj, AsyncWrap::PROVIDER_PIPECONNECTWRAP);
- req_wrap->Dispatch(uv_pipe_connect,
- &wrap->handle_,
- *name,
- AfterConnect);
+ req_wrap->Dispatch(
+ uv_pipe_connect2, &wrap->handle_, *name, name.length(), 0, AfterConnect);
TRACE_EVENT_NESTABLE_ASYNC_BEGIN1(TRACING_CATEGORY_NODE2(net, native),
"connect",
@@ -251,7 +237,6 @@ void PipeWrap::Connect(const FunctionCallbackInfo& args) {
args.GetReturnValue().Set(0); // uv_pipe_connect() doesn't return errors.
}
-
} // namespace node
NODE_BINDING_CONTEXT_AWARE_INTERNAL(pipe_wrap, node::PipeWrap::Initialize)
diff --git a/src/stream_base.h b/src/stream_base.h
index e96ff036157ab9..8f6a7b22ea1f2f 100644
--- a/src/stream_base.h
+++ b/src/stream_base.h
@@ -225,7 +225,7 @@ class StreamResource {
// These need to be implemented on the readable side of this stream:
// Start reading from the underlying resource. This is called by the consumer
- // when more data is desired. Use `EmitAlloc()` and `EmitData()` to
+ // when more data is desired. Use `EmitAlloc()` and `EmitRead()` to
// pass data along to the consumer.
virtual int ReadStart() = 0;
// Stop reading from the underlying resource. This is called by the
diff --git a/src/undici_version.h b/src/undici_version.h
index 47aef25a73212d..d47c6d538b7355 100644
--- a/src/undici_version.h
+++ b/src/undici_version.h
@@ -2,5 +2,5 @@
// Refer to tools/update-undici.sh
#ifndef SRC_UNDICI_VERSION_H_
#define SRC_UNDICI_VERSION_H_
-#define UNDICI_VERSION "5.23.0"
+#define UNDICI_VERSION "5.25.2"
#endif // SRC_UNDICI_VERSION_H_
diff --git a/test/common/sea.js b/test/common/sea.js
index bb337f176f2cc2..cc1890a5464012 100644
--- a/test/common/sea.js
+++ b/test/common/sea.js
@@ -4,7 +4,9 @@ const common = require('../common');
const fixtures = require('../common/fixtures');
const { readFileSync } = require('fs');
-const { execFileSync } = require('child_process');
+const {
+ spawnSyncAndExitWithoutError,
+} = require('../common/child_process');
function skipIfSingleExecutableIsNotSupported() {
if (!process.config.variables.single_executable_application)
@@ -45,38 +47,39 @@ function skipIfSingleExecutableIsNotSupported() {
function injectAndCodeSign(targetExecutable, resource) {
const postjectFile = fixtures.path('postject-copy', 'node_modules', 'postject', 'dist', 'cli.js');
- execFileSync(process.execPath, [
+ spawnSyncAndExitWithoutError(process.execPath, [
postjectFile,
targetExecutable,
'NODE_SEA_BLOB',
resource,
'--sentinel-fuse', 'NODE_SEA_FUSE_fce680ab2cc467b6e072b8b5df1996b2',
...process.platform === 'darwin' ? [ '--macho-segment-name', 'NODE_SEA' ] : [],
- ]);
+ ], {});
if (process.platform === 'darwin') {
- execFileSync('codesign', [ '--sign', '-', targetExecutable ]);
- execFileSync('codesign', [ '--verify', targetExecutable ]);
+ spawnSyncAndExitWithoutError('codesign', [ '--sign', '-', targetExecutable ], {});
+ spawnSyncAndExitWithoutError('codesign', [ '--verify', targetExecutable ], {});
} else if (process.platform === 'win32') {
let signtoolFound = false;
try {
- execFileSync('where', [ 'signtool' ]);
+ spawnSyncAndExitWithoutError('where', [ 'signtool' ], {});
signtoolFound = true;
} catch (err) {
console.log(err.message);
}
if (signtoolFound) {
let certificatesFound = false;
+ let stderr;
try {
- execFileSync('signtool', [ 'sign', '/fd', 'SHA256', targetExecutable ]);
+ ({ stderr } = spawnSyncAndExitWithoutError('signtool', [ 'sign', '/fd', 'SHA256', targetExecutable ], {}));
certificatesFound = true;
} catch (err) {
- if (!/SignTool Error: No certificates were found that met all the given criteria/.test(err)) {
+ if (!/SignTool Error: No certificates were found that met all the given criteria/.test(stderr)) {
throw err;
}
}
if (certificatesFound) {
- execFileSync('signtool', 'verify', '/pa', 'SHA256', targetExecutable);
+ spawnSyncAndExitWithoutError('signtool', 'verify', '/pa', 'SHA256', targetExecutable, {});
}
}
}
diff --git a/test/doctool/test-doctool-html.mjs b/test/doctool/test-doctool-html.mjs
index 6fbca131851f3b..08a6af4299c33e 100644
--- a/test/doctool/test-doctool-html.mjs
+++ b/test/doctool/test-doctool-html.mjs
@@ -138,11 +138,11 @@ const testData = [
},
{
file: fixtures.path('document_with_esm_and_cjs_code_snippet.md'),
- html: '= 3);
+ // and --embedder-snapshot-create (which is filtered, so at least
+ // 2 arguments should remain after filtering).
+ assert(filtered_args.size() >= 2);
+ // Insert an anonymous filename as process.argv[1].
filtered_args.insert(filtered_args.begin() + 1,
node::GetAnonymousMainPath());
}
@@ -153,19 +154,26 @@ int RunNodeInstance(MultiIsolatePlatform* platform,
Context::Scope context_scope(setup->context());
MaybeLocal loadenv_ret;
- if (snapshot) {
+ if (snapshot) { // Deserializing snapshot
loadenv_ret = node::LoadEnvironment(env, node::StartExecutionCallback{});
- } else {
+ } else if (is_building_snapshot) {
+ // Environment created for snapshotting must set process.argv[1] to
+ // the name of the main script, which was inserted above.
loadenv_ret = node::LoadEnvironment(
env,
- // Snapshots do not support userland require()s (yet)
- "if (!require('v8').startupSnapshot.isBuildingSnapshot()) {"
- " const publicRequire ="
- " require('module').createRequire(process.cwd() + '/');"
- " globalThis.require = publicRequire;"
- "} else globalThis.require = require;"
+ "const assert = require('assert');"
+ "assert(require('v8').startupSnapshot.isBuildingSnapshot());"
"globalThis.embedVars = { nön_ascıı: '🏳️🌈' };"
+ "globalThis.require = require;"
"require('vm').runInThisContext(process.argv[2]);");
+ } else {
+ loadenv_ret = node::LoadEnvironment(
+ env,
+ "const publicRequire = require('module').createRequire(process.cwd() "
+ "+ '/');"
+ "globalThis.require = publicRequire;"
+ "globalThis.embedVars = { nön_ascıı: '🏳️🌈' };"
+ "require('vm').runInThisContext(process.argv[1]);");
}
if (loadenv_ret.IsEmpty()) // There has been a JS exception.
diff --git a/test/embedding/test-embedding.js b/test/embedding/test-embedding.js
index 5d448b78a433e8..1fb3bc73f494cb 100644
--- a/test/embedding/test-embedding.js
+++ b/test/embedding/test-embedding.js
@@ -3,7 +3,10 @@ const common = require('../common');
const fixtures = require('../common/fixtures');
const tmpdir = require('../common/tmpdir');
const assert = require('assert');
-const child_process = require('child_process');
+const {
+ spawnSyncAndExit,
+ spawnSyncAndExitWithoutError,
+} = require('../common/child_process');
const path = require('path');
const fs = require('fs');
@@ -21,39 +24,54 @@ function resolveBuiltBinary(bin) {
const binary = resolveBuiltBinary('embedtest');
-assert.strictEqual(
- child_process.spawnSync(binary, ['console.log(42)'])
- .stdout.toString().trim(),
- '42');
-
-assert.strictEqual(
- child_process.spawnSync(binary, ['console.log(embedVars.nön_ascıı)'])
- .stdout.toString().trim(),
- '🏳️🌈');
-
-assert.strictEqual(
- child_process.spawnSync(binary, ['console.log(42)'])
- .stdout.toString().trim(),
- '42');
+spawnSyncAndExitWithoutError(
+ binary,
+ ['console.log(42)'],
+ {
+ trim: true,
+ stdout: '42',
+ });
-assert.strictEqual(
- child_process.spawnSync(binary, ['throw new Error()']).status,
- 1);
+spawnSyncAndExitWithoutError(
+ binary,
+ ['console.log(embedVars.nön_ascıı)'],
+ {
+ trim: true,
+ stdout: '🏳️🌈',
+ });
-// Cannot require internals anymore:
-assert.strictEqual(
- child_process.spawnSync(binary, ['require("lib/internal/test/binding")']).status,
- 1);
+spawnSyncAndExit(
+ binary,
+ ['throw new Error()'],
+ {
+ status: 1,
+ signal: null,
+ });
-assert.strictEqual(
- child_process.spawnSync(binary, ['process.exitCode = 8']).status,
- 8);
+spawnSyncAndExit(
+ binary,
+ ['require("lib/internal/test/binding")'],
+ {
+ status: 1,
+ signal: null,
+ });
+spawnSyncAndExit(
+ binary,
+ ['process.exitCode = 8'],
+ {
+ status: 8,
+ signal: null,
+ });
const fixturePath = JSON.stringify(fixtures.path('exit.js'));
-assert.strictEqual(
- child_process.spawnSync(binary, [`require(${fixturePath})`, 92]).status,
- 92);
+spawnSyncAndExit(
+ binary,
+ [`require(${fixturePath})`, 92],
+ {
+ status: 92,
+ signal: null,
+ });
function getReadFileCodeForPath(path) {
return `(require("fs").readFileSync(${JSON.stringify(path)}, "utf8"))`;
@@ -64,31 +82,49 @@ for (const extraSnapshotArgs of [[], ['--embedder-snapshot-as-file']]) {
// readSync + eval since snapshots don't support userland require() (yet)
const snapshotFixture = fixtures.path('snapshot', 'echo-args.js');
const blobPath = tmpdir.resolve('embedder-snapshot.blob');
- const buildSnapshotArgs = [
+ const buildSnapshotExecArgs = [
`eval(${getReadFileCodeForPath(snapshotFixture)})`, 'arg1', 'arg2',
+ ];
+ const embedTestBuildArgs = [
'--embedder-snapshot-blob', blobPath, '--embedder-snapshot-create',
...extraSnapshotArgs,
];
- const runEmbeddedArgs = [
- '--embedder-snapshot-blob', blobPath, ...extraSnapshotArgs, 'arg3', 'arg4',
+ const buildSnapshotArgs = [
+ ...buildSnapshotExecArgs,
+ ...embedTestBuildArgs,
+ ];
+
+ const runSnapshotExecArgs = [
+ 'arg3', 'arg4',
+ ];
+ const embedTestRunArgs = [
+ '--embedder-snapshot-blob', blobPath,
+ ...extraSnapshotArgs,
+ ];
+ const runSnapshotArgs = [
+ ...runSnapshotExecArgs,
+ ...embedTestRunArgs,
];
fs.rmSync(blobPath, { force: true });
- const child = child_process.spawnSync(binary, [
- '--', ...buildSnapshotArgs,
- ], {
- cwd: tmpdir.path,
- });
- if (child.status !== 0) {
- console.log(child.stderr.toString());
- console.log(child.stdout.toString());
- }
- assert.strictEqual(child.status, 0);
- const spawnResult = child_process.spawnSync(binary, ['--', ...runEmbeddedArgs]);
- assert.deepStrictEqual(JSON.parse(spawnResult.stdout), {
- originalArgv: [binary, ...buildSnapshotArgs],
- currentArgv: [binary, ...runEmbeddedArgs],
- });
+ spawnSyncAndExitWithoutError(
+ binary,
+ [ '--', ...buildSnapshotArgs ],
+ { cwd: tmpdir.path },
+ {});
+ spawnSyncAndExitWithoutError(
+ binary,
+ [ '--', ...runSnapshotArgs ],
+ { cwd: tmpdir.path },
+ {
+ stdout(output) {
+ assert.deepStrictEqual(JSON.parse(output), {
+ originalArgv: [binary, '__node_anonymous_main', ...buildSnapshotExecArgs],
+ currentArgv: [binary, ...runSnapshotExecArgs],
+ });
+ return true;
+ },
+ });
}
// Create workers and vm contexts after deserialization
@@ -99,14 +135,20 @@ for (const extraSnapshotArgs of [[], ['--embedder-snapshot-as-file']]) {
`eval(${getReadFileCodeForPath(snapshotFixture)})`,
'--embedder-snapshot-blob', blobPath, '--embedder-snapshot-create',
];
+ const runEmbeddedArgs = [
+ '--embedder-snapshot-blob', blobPath,
+ ];
fs.rmSync(blobPath, { force: true });
- assert.strictEqual(child_process.spawnSync(binary, [
- '--', ...buildSnapshotArgs,
- ], {
- cwd: tmpdir.path,
- }).status, 0);
- assert.strictEqual(
- child_process.spawnSync(binary, ['--', '--embedder-snapshot-blob', blobPath]).status,
- 0);
+
+ spawnSyncAndExitWithoutError(
+ binary,
+ [ '--', ...buildSnapshotArgs ],
+ { cwd: tmpdir.path },
+ {});
+ spawnSyncAndExitWithoutError(
+ binary,
+ [ '--', ...runEmbeddedArgs ],
+ { cwd: tmpdir.path },
+ {});
}
diff --git a/test/es-module/test-cjs-esm-warn.js b/test/es-module/test-cjs-esm-warn.js
index c1d60a209502bb..7ac85fd58c5f18 100644
--- a/test/es-module/test-cjs-esm-warn.js
+++ b/test/es-module/test-cjs-esm-warn.js
@@ -31,7 +31,7 @@ describe('CJS ↔︎ ESM interop warnings', { concurrency: true }, () => {
);
assert.ok(
stderr.replaceAll('\r', '').includes(
- `Instead rename ${basename} to end in .cjs, change the requiring ` +
+ `Instead either rename ${basename} to end in .cjs, change the requiring ` +
'code to use dynamic import() which is available in all CommonJS ' +
`modules, or change "type": "module" to "type": "commonjs" in ${pjson} to ` +
'treat all .js files as CommonJS (using .mjs for all ES modules ' +
diff --git a/test/es-module/test-esm-experimental-warnings.mjs b/test/es-module/test-esm-experimental-warnings.mjs
index 85b458258b6134..d499aae0afc1cd 100644
--- a/test/es-module/test-esm-experimental-warnings.mjs
+++ b/test/es-module/test-esm-experimental-warnings.mjs
@@ -24,14 +24,18 @@ describe('ESM: warn for obsolete hooks provided', { concurrency: true }, () => {
describe('experimental warnings for enabled experimental feature', () => {
for (
- const [experiment, arg] of [
- [/Custom ESM Loaders/, `--experimental-loader=${fileURL('es-module-loaders', 'hooks-custom.mjs')}`],
+ const [experiment, ...args] of [
+ [
+ /`--experimental-loader` may be removed in the future/,
+ '--experimental-loader',
+ fileURL('es-module-loaders', 'hooks-custom.mjs'),
+ ],
[/Network Imports/, '--experimental-network-imports'],
]
) {
it(`should print for ${experiment.toString().replaceAll('/', '')}`, async () => {
const { code, signal, stderr } = await spawnPromisified(execPath, [
- arg,
+ ...args,
'--input-type=module',
'--eval',
`import ${JSON.stringify(fileURL('es-module-loaders', 'module-named-exports.mjs'))}`,
diff --git a/test/es-module/test-esm-import-meta-resolve.mjs b/test/es-module/test-esm-import-meta-resolve.mjs
index 8495c161312822..a6435655750c88 100644
--- a/test/es-module/test-esm-import-meta-resolve.mjs
+++ b/test/es-module/test-esm-import-meta-resolve.mjs
@@ -9,10 +9,8 @@ const fixtures = dirname.slice(0, dirname.lastIndexOf('/', dirname.length - 2) +
assert.strictEqual(import.meta.resolve('./test-esm-import-meta.mjs'),
dirname + 'test-esm-import-meta.mjs');
-const notFound = import.meta.resolve('./notfound.mjs');
-assert.strictEqual(new URL(notFound).href, new URL('./notfound.mjs', import.meta.url).href);
-const noExtension = import.meta.resolve('./asset');
-assert.strictEqual(new URL(noExtension).href, new URL('./asset', import.meta.url).href);
+assert.strictEqual(import.meta.resolve('./notfound.mjs'), new URL('./notfound.mjs', import.meta.url).href);
+assert.strictEqual(import.meta.resolve('./asset'), new URL('./asset', import.meta.url).href);
try {
import.meta.resolve('does-not-exist');
assert.fail();
diff --git a/test/es-module/test-esm-loader-default-resolver.mjs b/test/es-module/test-esm-loader-default-resolver.mjs
index 27320fcfcfe862..2a69010e05047f 100644
--- a/test/es-module/test-esm-loader-default-resolver.mjs
+++ b/test/es-module/test-esm-loader-default-resolver.mjs
@@ -49,4 +49,18 @@ describe('default resolver', () => {
assert.strictEqual(stdout.trim(), 'index.byoe!');
assert.strictEqual(stderr, '');
});
+
+ it('should identify the parent module of an invalid URL host in import specifier', async () => {
+ if (process.platform === 'win32') return;
+
+ const { code, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ fixtures.path('es-modules', 'invalid-posix-host.mjs'),
+ ]);
+
+ assert.match(stderr, /ERR_INVALID_FILE_URL_HOST/);
+ assert.match(stderr, /file:\/\/hmm\.js/);
+ assert.match(stderr, /invalid-posix-host\.mjs/);
+ assert.strictEqual(code, 1);
+ });
});
diff --git a/test/es-module/test-esm-loader-globalpreload-hook.mjs b/test/es-module/test-esm-loader-globalpreload-hook.mjs
new file mode 100644
index 00000000000000..87def31fb3d0ea
--- /dev/null
+++ b/test/es-module/test-esm-loader-globalpreload-hook.mjs
@@ -0,0 +1,149 @@
+import { spawnPromisified } from '../common/index.mjs';
+import * as fixtures from '../common/fixtures.mjs';
+import assert from 'node:assert';
+import os from 'node:os';
+import { execPath } from 'node:process';
+import { describe, it } from 'node:test';
+
+describe('globalPreload hook', () => {
+ it('should not emit deprecation warning when initialize is supplied', async () => {
+ const { stderr } = await spawnPromisified(execPath, [
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){}export function initialize(){}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.doesNotMatch(stderr, /`globalPreload` is an experimental feature/);
+ });
+
+ it('should handle globalPreload returning undefined', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should handle loading node:test', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return `getBuiltin("node:test")()`}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.match(stdout, /\n# pass 1\r?\n/);
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should handle loading node:os with node: prefix', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return `console.log(getBuiltin("node:os").arch())`}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout.trim(), os.arch());
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+
+ // `os` is used here because it's simple and not mocked (the builtin module otherwise doesn't matter).
+ it('should handle loading builtin module without node: prefix', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return `console.log(getBuiltin("os").arch())`}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout.trim(), os.arch());
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should throw when loading node:test without node: prefix', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return `getBuiltin("test")()`}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.match(stderr, /ERR_UNKNOWN_BUILTIN_MODULE/);
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 1);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should register globals set from globalPreload', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return "this.myGlobal=4"}',
+ '--print', 'myGlobal',
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout.trim(), '4');
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should log console.log calls returned from globalPreload', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return `console.log("Hello from globalPreload")`}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout.trim(), 'Hello from globalPreload');
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should crash if globalPreload returns code that throws', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function globalPreload(){return `throw new Error("error from globalPreload")`}',
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.match(stderr, /error from globalPreload/);
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 1);
+ assert.strictEqual(signal, null);
+ });
+
+ it('should have a `this` value that is not bound to the loader instance', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ `data:text/javascript,export ${function globalPreload() {
+ if (this != null) {
+ throw new Error('hook function must not be bound to ESMLoader instance');
+ }
+ }}`,
+ fixtures.path('empty.js'),
+ ]);
+
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
+});
diff --git a/test/es-module/test-esm-loader-hooks.mjs b/test/es-module/test-esm-loader-hooks.mjs
index 2ea0128596e25b..6544c05ec93f8e 100644
--- a/test/es-module/test-esm-loader-hooks.mjs
+++ b/test/es-module/test-esm-loader-hooks.mjs
@@ -1,7 +1,6 @@
import { spawnPromisified } from '../common/index.mjs';
import * as fixtures from '../common/fixtures.mjs';
import assert from 'node:assert';
-import os from 'node:os';
import { execPath } from 'node:process';
import { describe, it } from 'node:test';
@@ -423,7 +422,7 @@ describe('Loader hooks', { concurrency: true }, () => {
});
describe('globalPreload', () => {
- it('should emit deprecation warning', async () => {
+ it('should emit warning', async () => {
const { stderr } = await spawnPromisified(execPath, [
'--experimental-loader',
'data:text/javascript,export function globalPreload(){}',
@@ -434,290 +433,266 @@ describe('Loader hooks', { concurrency: true }, () => {
assert.strictEqual(stderr.match(/`globalPreload` is an experimental feature/g).length, 1);
});
+ });
- it('should not emit deprecation warning when initialize is supplied', async () => {
- const { stderr } = await spawnPromisified(execPath, [
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){}export function initialize(){}',
- fixtures.path('empty.js'),
- ]);
+ it('should be fine to call `process.removeAllListeners("beforeExit")` from the main thread', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--experimental-loader',
+ 'data:text/javascript,export function load(a,b,c){return new Promise(d=>setTimeout(()=>d(c(a,b)),99))}',
+ '--input-type=module',
+ '--eval',
+ 'setInterval(() => process.removeAllListeners("beforeExit"),1).unref();await import("data:text/javascript,")',
+ ]);
- assert.doesNotMatch(stderr, /`globalPreload` is an experimental feature/);
- });
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 0);
+ assert.strictEqual(signal, null);
+ });
- it('should handle globalPreload returning undefined', async () => {
+ describe('`initialize`/`register`', () => {
+ it('should invoke `initialize` correctly', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
'--experimental-loader',
- 'data:text/javascript,export function globalPreload(){}',
- fixtures.path('empty.js'),
+ fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'),
+ '--input-type=module',
+ '--eval',
+ 'import os from "node:os";',
]);
assert.strictEqual(stderr, '');
- assert.strictEqual(stdout, '');
+ assert.deepStrictEqual(stdout.split('\n'), ['hooks initialize 1', '']);
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- it('should handle loading node:test', async () => {
+ it('should allow communicating with loader via `register` ports', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return `getBuiltin("node:test")()`}',
- fixtures.path('empty.js'),
+ '--input-type=module',
+ '--eval',
+ `
+ import {MessageChannel} from 'node:worker_threads';
+ import {register} from 'node:module';
+ import {once} from 'node:events';
+ const {port1, port2} = new MessageChannel();
+ port1.on('message', (msg) => {
+ console.log('message', msg);
+ });
+ const result = register(
+ ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize-port.mjs'))},
+ {data: port2, transferList: [port2]},
+ );
+ console.log('register', result);
+
+ const timeout = setTimeout(() => {}, 2**31 - 1); // to keep the process alive.
+ await Promise.all([
+ once(port1, 'message').then(() => once(port1, 'message')),
+ import('node:os'),
+ ]);
+ clearTimeout(timeout);
+ port1.close();
+ `,
]);
assert.strictEqual(stderr, '');
- assert.match(stdout, /\n# pass 1\r?\n/);
+ assert.deepStrictEqual(stdout.split('\n'), [ 'register undefined',
+ 'message initialize',
+ 'message resolve node:os',
+ '' ]);
+
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- it('should handle loading node:os with node: prefix', async () => {
+ it('should have `register` accept URL objects as `parentURL`', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return `console.log(getBuiltin("node:os").arch())`}',
- fixtures.path('empty.js'),
- ]);
+ '--import',
+ `data:text/javascript,${encodeURIComponent(
+ 'import{ register } from "node:module";' +
+ 'import { pathToFileURL } from "node:url";' +
+ 'register("./hooks-initialize.mjs", pathToFileURL("./"));'
+ )}`,
+ '--input-type=module',
+ '--eval',
+ `
+ import {register} from 'node:module';
+ register(
+ ${JSON.stringify(fixtures.fileURL('es-module-loaders/loader-load-foo-or-42.mjs'))},
+ new URL('data:'),
+ );
+
+ import('node:os').then((result) => {
+ console.log(JSON.stringify(result));
+ });
+ `,
+ ], { cwd: fixtures.fileURL('es-module-loaders/') });
assert.strictEqual(stderr, '');
- assert.strictEqual(stdout.trim(), os.arch());
+ assert.deepStrictEqual(stdout.split('\n').sort(), ['hooks initialize 1', '{"default":"foo"}', ''].sort());
+
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- // `os` is used here because it's simple and not mocked (the builtin module otherwise doesn't matter).
- it('should handle loading builtin module without node: prefix', async () => {
+ it('should have `register` work with cjs', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return `console.log(getBuiltin("os").arch())`}',
- fixtures.path('empty.js'),
+ '--input-type=commonjs',
+ '--eval',
+ `
+ 'use strict';
+ const {register} = require('node:module');
+ register(
+ ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'))},
+ );
+ register(
+ ${JSON.stringify(fixtures.fileURL('es-module-loaders/loader-load-foo-or-42.mjs'))},
+ );
+
+ import('node:os').then((result) => {
+ console.log(JSON.stringify(result));
+ });
+ `,
]);
assert.strictEqual(stderr, '');
- assert.strictEqual(stdout.trim(), os.arch());
+ assert.deepStrictEqual(stdout.split('\n').sort(), ['hooks initialize 1', '{"default":"foo"}', ''].sort());
+
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- it('should throw when loading node:test without node: prefix', async () => {
+ it('`register` should work with `require`', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return `getBuiltin("test")()`}',
- fixtures.path('empty.js'),
+ '--require',
+ fixtures.path('es-module-loaders/register-loader.cjs'),
+ '--input-type=module',
+ '--eval',
+ 'import "node:os";',
]);
- assert.match(stderr, /ERR_UNKNOWN_BUILTIN_MODULE/);
- assert.strictEqual(stdout, '');
- assert.strictEqual(code, 1);
+ assert.strictEqual(stderr, '');
+ assert.deepStrictEqual(stdout.split('\n'), ['resolve passthru', 'resolve passthru', '']);
+ assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- it('should register globals set from globalPreload', async () => {
+ it('`register` should work with `import`', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return "this.myGlobal=4"}',
- '--print', 'myGlobal',
+ '--import',
+ fixtures.fileURL('es-module-loaders/register-loader.mjs'),
+ '--input-type=module',
+ '--eval',
+ 'import "node:os"',
]);
assert.strictEqual(stderr, '');
- assert.strictEqual(stdout.trim(), '4');
+ assert.deepStrictEqual(stdout.split('\n'), ['resolve passthru', '']);
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- it('should log console.log calls returned from globalPreload', async () => {
+ it('should execute `initialize` in sequence', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return `console.log("Hello from globalPreload")`}',
- fixtures.path('empty.js'),
+ '--input-type=module',
+ '--eval',
+ `
+ import {register} from 'node:module';
+ console.log('result 1', register(
+ ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'))}
+ ));
+ console.log('result 2', register(
+ ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'))}
+ ));
+
+ await import('node:os');
+ `,
]);
assert.strictEqual(stderr, '');
- assert.strictEqual(stdout.trim(), 'Hello from globalPreload');
+ assert.deepStrictEqual(stdout.split('\n'), [ 'hooks initialize 1',
+ 'result 1 undefined',
+ 'hooks initialize 2',
+ 'result 2 undefined',
+ '' ]);
assert.strictEqual(code, 0);
assert.strictEqual(signal, null);
});
- it('should crash if globalPreload returns code that throws', async () => {
+ it('should handle `initialize` returning never-settling promise', async () => {
const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
'--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function globalPreload(){return `throw new Error("error from globalPreload")`}',
- fixtures.path('empty.js'),
+ '--input-type=module',
+ '--eval',
+ `
+ import {register} from 'node:module';
+ register('data:text/javascript,export function initialize(){return new Promise(()=>{})}');
+ `,
]);
- assert.match(stderr, /error from globalPreload/);
+ assert.strictEqual(stderr, '');
assert.strictEqual(stdout, '');
- assert.strictEqual(code, 1);
+ assert.strictEqual(code, 13);
assert.strictEqual(signal, null);
});
- });
-
- it('should be fine to call `process.removeAllListeners("beforeExit")` from the main thread', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--experimental-loader',
- 'data:text/javascript,export function load(a,b,c){return new Promise(d=>setTimeout(()=>d(c(a,b)),99))}',
- '--input-type=module',
- '--eval',
- 'setInterval(() => process.removeAllListeners("beforeExit"),1).unref();await import("data:text/javascript,")',
- ]);
-
- assert.strictEqual(stderr, '');
- assert.strictEqual(stdout, '');
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
- });
-
- it('should invoke `initialize` correctly', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--experimental-loader',
- fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'),
- '--input-type=module',
- '--eval',
- 'import os from "node:os";',
- ]);
-
- assert.strictEqual(stderr, '');
- assert.deepStrictEqual(stdout.split('\n'), ['hooks initialize 1', '']);
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
- });
- it('should allow communicating with loader via `register` ports', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--input-type=module',
- '--eval',
- `
- import {MessageChannel} from 'node:worker_threads';
+ it('should handle `initialize` returning rejecting promise', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--input-type=module',
+ '--eval',
+ `
import {register} from 'node:module';
- import {once} from 'node:events';
- const {port1, port2} = new MessageChannel();
- port1.on('message', (msg) => {
- console.log('message', msg);
- });
- const result = register(
- ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize-port.mjs'))},
- {data: port2, transferList: [port2]},
- );
- console.log('register', result);
-
- const timeout = setTimeout(() => {}, 2**31 - 1); // to keep the process alive.
- await Promise.all([
- once(port1, 'message').then(() => once(port1, 'message')),
- import('node:os'),
- ]);
- clearTimeout(timeout);
- port1.close();
- `,
- ]);
-
- assert.strictEqual(stderr, '');
- assert.deepStrictEqual(stdout.split('\n'), [ 'register ok',
- 'message initialize',
- 'message resolve node:os',
- '' ]);
-
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
- });
-
- it('should have `register` work with cjs', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--input-type=commonjs',
- '--eval',
- `
- 'use strict';
- const {register} = require('node:module');
- register(
- ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'))},
- );
- register(
- ${JSON.stringify(fixtures.fileURL('es-module-loaders/loader-load-foo-or-42.mjs'))},
- );
-
- import('node:os').then((result) => {
- console.log(JSON.stringify(result));
- });
- `,
- ]);
-
- assert.strictEqual(stderr, '');
- assert.deepStrictEqual(stdout.split('\n').sort(), ['hooks initialize 1', '{"default":"foo"}', ''].sort());
-
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
- });
-
- it('`register` should work with `require`', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--require',
- fixtures.path('es-module-loaders/register-loader.cjs'),
- '--input-type=module',
- '--eval',
- 'import "node:os";',
- ]);
+ register('data:text/javascript,export function initialize(){return Promise.reject()}');
+ `,
+ ]);
- assert.strictEqual(stderr, '');
- assert.deepStrictEqual(stdout.split('\n'), ['resolve passthru', 'resolve passthru', '']);
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
- });
+ assert.match(stderr, /undefined\r?\n/);
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 1);
+ assert.strictEqual(signal, null);
+ });
- it('`register` should work with `import`', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--import',
- fixtures.fileURL('es-module-loaders/register-loader.mjs'),
- '--input-type=module',
- '--eval',
- `
- import 'node:os';
- `,
- ]);
+ it('should handle `initialize` throwing null', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--input-type=module',
+ '--eval',
+ `
+ import {register} from 'node:module';
+ register('data:text/javascript,export function initialize(){throw null}');
+ `,
+ ]);
- assert.strictEqual(stderr, '');
- assert.deepStrictEqual(stdout.split('\n'), ['resolve passthru', '']);
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
- });
+ assert.match(stderr, /null\r?\n/);
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 1);
+ assert.strictEqual(signal, null);
+ });
- it('should execute `initialize` in sequence', async () => {
- const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
- '--no-warnings',
- '--input-type=module',
- '--eval',
- `
+ it('should be fine to call `process.exit` from a initialize hook', async () => {
+ const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [
+ '--no-warnings',
+ '--input-type=module',
+ '--eval',
+ `
import {register} from 'node:module';
- console.log('result', register(
- ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'))}
- ));
- console.log('result', register(
- ${JSON.stringify(fixtures.fileURL('es-module-loaders/hooks-initialize.mjs'))}
- ));
-
- await import('node:os');
- `,
- ]);
+ register('data:text/javascript,export function initialize(){process.exit(42);}');
+ `,
+ ]);
- assert.strictEqual(stderr, '');
- assert.deepStrictEqual(stdout.split('\n'), [ 'hooks initialize 1',
- 'result 1',
- 'hooks initialize 2',
- 'result 2',
- '' ]);
- assert.strictEqual(code, 0);
- assert.strictEqual(signal, null);
+ assert.strictEqual(stderr, '');
+ assert.strictEqual(stdout, '');
+ assert.strictEqual(code, 42);
+ assert.strictEqual(signal, null);
+ });
});
it('should use CJS loader to respond to require.resolve calls by default', async () => {
diff --git a/test/es-module/test-esm-loader-mock.mjs b/test/es-module/test-esm-loader-mock.mjs
index 2783bf694d239a..164d0ac3775039 100644
--- a/test/es-module/test-esm-loader-mock.mjs
+++ b/test/es-module/test-esm-loader-mock.mjs
@@ -1,9 +1,6 @@
-// Flags: --loader ./test/fixtures/es-module-loaders/mock-loader.mjs
import '../common/index.mjs';
-import assert from 'assert/strict';
-
-// This is provided by test/fixtures/es-module-loaders/mock-loader.mjs
-import mock from 'node:mock';
+import assert from 'node:assert/strict';
+import { mock } from '../fixtures/es-module-loaders/mock.mjs';
mock('node:events', {
EventEmitter: 'This is mocked!'
diff --git a/test/es-module/test-esm-loader-resolve-type.mjs b/test/es-module/test-esm-loader-resolve-type.mjs
index 482320c664c5d8..4c5e7aede8fccc 100644
--- a/test/es-module/test-esm-loader-resolve-type.mjs
+++ b/test/es-module/test-esm-loader-resolve-type.mjs
@@ -1,44 +1,46 @@
-// Flags: --loader ./test/fixtures/es-module-loaders/hook-resolve-type.mjs
-import { allowGlobals } from '../common/index.mjs';
+import { spawnPromisified } from '../common/index.mjs';
+import * as tmpdir from '../common/tmpdir.js';
import * as fixtures from '../common/fixtures.mjs';
-import { strict as assert } from 'assert';
-import * as fs from 'fs';
-
-allowGlobals(global.getModuleTypeStats);
-
-const { importedESM: importedESMBefore,
- importedCJS: importedCJSBefore } = await global.getModuleTypeStats();
-
-const basePath =
- new URL('./node_modules/', import.meta.url);
-
-const rel = (file) => new URL(file, basePath);
-const createDir = (path) => {
- if (!fs.existsSync(path)) {
- fs.mkdirSync(path);
- }
-};
+import { deepStrictEqual } from 'node:assert';
+import { mkdir, rm, cp } from 'node:fs/promises';
+import { execPath } from 'node:process';
+const base = tmpdir.fileURL(`test-esm-loader-resolve-type-${(Math.random() * Date.now()).toFixed(0)}`);
const moduleName = 'module-counter-by-type';
-const moduleDir = rel(`${moduleName}`);
+const moduleURL = new URL(`${base}/node_modules/${moduleName}`);
try {
- createDir(basePath);
- createDir(moduleDir);
- fs.cpSync(
- fixtures.path('es-modules', moduleName),
- moduleDir,
+ await mkdir(moduleURL, { recursive: true });
+ await cp(
+ fixtures.path('es-modules', 'module-counter-by-type'),
+ moduleURL,
{ recursive: true }
);
+ const output = await spawnPromisified(
+ execPath,
+ [
+ '--no-warnings',
+ '--input-type=module',
+ '--eval',
+ `import { getModuleTypeStats } from ${JSON.stringify(fixtures.fileURL('es-module-loaders', 'hook-resolve-type.mjs'))};
+ const before = getModuleTypeStats();
+ await import(${JSON.stringify(moduleName)});
+ const after = getModuleTypeStats();
+ console.log(JSON.stringify({ before, after }));`,
+ ],
+ { cwd: base },
+ );
- await import(`${moduleName}`);
+ deepStrictEqual(output, {
+ code: 0,
+ signal: null,
+ stderr: '',
+ stdout: JSON.stringify({
+ before: { importedESM: 0, importedCJS: 0 },
+ // Dynamic import in the eval script should increment ESM counter but not CJS counter
+ after: { importedESM: 1, importedCJS: 0 },
+ }) + '\n',
+ });
} finally {
- fs.rmSync(basePath, { recursive: true, force: true });
+ await rm(base, { recursive: true, force: true });
}
-
-const { importedESM: importedESMAfter,
- importedCJS: importedCJSAfter } = await global.getModuleTypeStats();
-
-// Dynamic import above should increment ESM counter but not CJS counter
-assert.strictEqual(importedESMBefore + 1, importedESMAfter);
-assert.strictEqual(importedCJSBefore, importedCJSAfter);
diff --git a/test/es-module/test-esm-named-exports.js b/test/es-module/test-esm-named-exports.js
index 4e10aaca635539..2c6f67288aa57c 100644
--- a/test/es-module/test-esm-named-exports.js
+++ b/test/es-module/test-esm-named-exports.js
@@ -1,4 +1,4 @@
-// Flags: --experimental-loader ./test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs
+// Flags: --import ./test/fixtures/es-module-loaders/builtin-named-exports.mjs
'use strict';
require('../common');
diff --git a/test/es-module/test-esm-named-exports.mjs b/test/es-module/test-esm-named-exports.mjs
index ce8599e68b1bf5..bbe9c96b92d9b8 100644
--- a/test/es-module/test-esm-named-exports.mjs
+++ b/test/es-module/test-esm-named-exports.mjs
@@ -1,4 +1,4 @@
-// Flags: --experimental-loader ./test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs
+// Flags: --import ./test/fixtures/es-module-loaders/builtin-named-exports.mjs
import '../common/index.mjs';
import { readFile, __fromLoader } from 'fs';
import assert from 'assert';
diff --git a/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs b/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs
index 7d462a53ada9d1..38fa0b3a7488aa 100644
--- a/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs
+++ b/test/fixtures/es-module-loaders/builtin-named-exports-loader.mjs
@@ -1,17 +1,10 @@
-import module from 'module';
+import module from 'node:module';
import { readFileSync } from 'node:fs';
-const GET_BUILTIN = `$__get_builtin_hole_${Date.now()}`;
-
-export function globalPreload() {
- return `Object.defineProperty(globalThis, ${JSON.stringify(GET_BUILTIN)}, {
- value: (builtinName) => {
- return getBuiltin(builtinName);
- },
- enumerable: false,
- configurable: false,
-});
-`;
+/** @type {string} */
+let GET_BUILTIN;
+export function initialize(data) {
+ GET_BUILTIN = data.GET_BUILTIN;
}
export async function resolve(specifier, context, next) {
@@ -56,7 +49,7 @@ const $builtinInstance = ${GET_BUILTIN}(${JSON.stringify(builtinName)});
module.exports = $builtinInstance;
module.exports.__fromLoader = true;
-// We need this for CJS-module-lexer can parse the exported names.
+// We need this for CJS-module-lexer can parse the exported names.
${
builtinExports
.map(name => `exports.${name} = $builtinInstance.${name};`)
diff --git a/test/fixtures/es-module-loaders/builtin-named-exports.mjs b/test/fixtures/es-module-loaders/builtin-named-exports.mjs
new file mode 100644
index 00000000000000..123b12c26bf0c9
--- /dev/null
+++ b/test/fixtures/es-module-loaders/builtin-named-exports.mjs
@@ -0,0 +1,17 @@
+import * as fixtures from '../../common/fixtures.mjs';
+import { createRequire, register } from 'node:module';
+
+const require = createRequire(import.meta.url);
+
+const GET_BUILTIN = `$__get_builtin_hole_${Date.now()}`;
+Object.defineProperty(globalThis, GET_BUILTIN, {
+ value: builtinName => require(builtinName),
+ enumerable: false,
+ configurable: false,
+});
+
+register(fixtures.fileURL('es-module-loaders/builtin-named-exports-loader.mjs'), {
+ data: {
+ GET_BUILTIN,
+ },
+});
diff --git a/test/fixtures/es-module-loaders/hook-resolve-type-loader.mjs b/test/fixtures/es-module-loaders/hook-resolve-type-loader.mjs
new file mode 100644
index 00000000000000..f2dc0aba5ca80e
--- /dev/null
+++ b/test/fixtures/es-module-loaders/hook-resolve-type-loader.mjs
@@ -0,0 +1,18 @@
+/** @type {MessagePort} */
+let port;
+export function initialize(data) {
+ port = data.port;
+}
+
+export async function resolve(specifier, context, next) {
+ const nextResult = await next(specifier, context);
+ const { format } = nextResult;
+
+ if (format === 'module' || specifier.endsWith('.mjs')) {
+ port.postMessage({ type: 'module' });
+ } else if (format == null || format === 'commonjs') {
+ port.postMessage({ type: 'commonjs' });
+ }
+
+ return nextResult;
+}
diff --git a/test/fixtures/es-module-loaders/hook-resolve-type.mjs b/test/fixtures/es-module-loaders/hook-resolve-type.mjs
index a4d87938ad843f..6fa7a553b8e83f 100644
--- a/test/fixtures/es-module-loaders/hook-resolve-type.mjs
+++ b/test/fixtures/es-module-loaders/hook-resolve-type.mjs
@@ -1,44 +1,30 @@
+import * as fixtures from '../../common/fixtures.mjs';
+import { register } from 'node:module';
+import { MessageChannel } from 'node:worker_threads';
+
let importedESM = 0;
let importedCJS = 0;
+export function getModuleTypeStats() {
+ return { importedESM, importedCJS };
+};
-export function globalPreload({ port }) {
- port.on('message', (int32) => {
- port.postMessage({ importedESM, importedCJS });
- Atomics.store(int32, 0, 1);
- Atomics.notify(int32, 0);
- });
- port.unref();
- return `
- const { receiveMessageOnPort } = getBuiltin('worker_threads');
- global.getModuleTypeStats = async function getModuleTypeStats() {
- const sab = new SharedArrayBuffer(4);
- const int32 = new Int32Array(sab);
- port.postMessage(int32);
- // Artificial timeout to keep the event loop alive.
- // https://bugs.chromium.org/p/v8/issues/detail?id=13238
- // TODO(targos) Remove when V8 issue is resolved.
- const timeout = setTimeout(() => { throw new Error('timeout'); }, 1_000);
- await Atomics.waitAsync(int32, 0, 0).value;
- clearTimeout(timeout);
- return receiveMessageOnPort(port).message;
- };
- `;
-}
-
-export async function load(url, context, next) {
- return next(url);
-}
+const { port1, port2 } = new MessageChannel();
-export async function resolve(specifier, context, next) {
- const nextResult = await next(specifier, context);
- const { format } = nextResult;
+register(fixtures.fileURL('es-module-loaders/hook-resolve-type-loader.mjs'), {
+ data: { port: port2 },
+ transferList: [port2],
+});
- if (format === 'module' || specifier.endsWith('.mjs')) {
- importedESM++;
- } else if (format == null || format === 'commonjs') {
- importedCJS++;
+port1.on('message', ({ type }) => {
+ switch (type) {
+ case 'module':
+ importedESM++;
+ break;
+ case 'commonjs':
+ importedCJS++;
+ break;
}
+});
- return nextResult;
-}
-
+port1.unref();
+port2.unref();
diff --git a/test/fixtures/es-module-loaders/hooks-initialize-port.mjs b/test/fixtures/es-module-loaders/hooks-initialize-port.mjs
index c522e3fa8bfd98..cefe8854297c50 100644
--- a/test/fixtures/es-module-loaders/hooks-initialize-port.mjs
+++ b/test/fixtures/es-module-loaders/hooks-initialize-port.mjs
@@ -3,7 +3,6 @@ let thePort = null;
export async function initialize(port) {
port.postMessage('initialize');
thePort = port;
- return 'ok';
}
export async function resolve(specifier, context, next) {
diff --git a/test/fixtures/es-module-loaders/hooks-initialize.mjs b/test/fixtures/es-module-loaders/hooks-initialize.mjs
index ab6f2c50d146e3..7622d982a9d7c5 100644
--- a/test/fixtures/es-module-loaders/hooks-initialize.mjs
+++ b/test/fixtures/es-module-loaders/hooks-initialize.mjs
@@ -4,5 +4,4 @@ let counter = 0;
export async function initialize() {
writeFileSync(1, `hooks initialize ${++counter}\n`);
- return counter;
}
diff --git a/test/fixtures/es-module-loaders/loader-this-value-inside-hook-functions.mjs b/test/fixtures/es-module-loaders/loader-this-value-inside-hook-functions.mjs
index c1c80622feea66..2be18c4969ef80 100644
--- a/test/fixtures/es-module-loaders/loader-this-value-inside-hook-functions.mjs
+++ b/test/fixtures/es-module-loaders/loader-this-value-inside-hook-functions.mjs
@@ -1,14 +1,21 @@
+export function initialize() {
+ if (this != null) {
+ throw new Error('hook function must not be bound to loader instance');
+ }
+}
+
export function resolve(url, _, next) {
- if (this != null) throw new Error('hook function must not be bound to ESMLoader instance');
+ if (this != null) {
+ throw new Error('hook function must not be bound to loader instance');
+ }
+
return next(url);
}
export function load(url, _, next) {
- if (this != null) throw new Error('hook function must not be bound to ESMLoader instance');
- return next(url);
-}
+ if (this != null) {
+ throw new Error('hook function must not be bound to loader instance');
+ }
-export function globalPreload() {
- if (this != null) throw new Error('hook function must not be bound to ESMLoader instance');
- return "";
+ return next(url);
}
diff --git a/test/fixtures/es-module-loaders/mock-loader.mjs b/test/fixtures/es-module-loaders/mock-loader.mjs
index 062be39603e851..3bb349b5385362 100644
--- a/test/fixtures/es-module-loaders/mock-loader.mjs
+++ b/test/fixtures/es-module-loaders/mock-loader.mjs
@@ -2,7 +2,7 @@ import { receiveMessageOnPort } from 'node:worker_threads';
const mockedModuleExports = new Map();
let currentMockVersion = 0;
-// This loader causes a new module `node:mock` to become available as a way to
+// These hooks enable code running on the application thread to
// swap module resolution results for mocking purposes. It uses this instead
// of import.meta so that CommonJS can still use the functionality.
//
@@ -22,7 +22,7 @@ let currentMockVersion = 0;
// it cannot be changed. So things like the following DO NOT WORK:
//
// ```mjs
-// import mock from 'node:mock';
+// import mock from 'test-esm-loader-mock'; // See test-esm-loader-mock.mjs
// mock('file:///app.js', {x:1});
// const namespace1 = await import('file:///app.js');
// namespace1.x; // 1
@@ -34,17 +34,6 @@ let currentMockVersion = 0;
// assert(namespace1 === namespace2);
// ```
-/**
- * FIXME: this is a hack to workaround loaders being
- * single threaded for now, just ensures that the MessagePort drains
- */
-function doDrainPort() {
- let msg;
- while (msg = receiveMessageOnPort(preloadPort)) {
- onPreloadPortMessage(msg.message);
- }
-}
-
/**
* @param param0 message from the application context
*/
@@ -54,127 +43,31 @@ function onPreloadPortMessage({
currentMockVersion = mockVersion;
mockedModuleExports.set(resolved, exports);
}
-let preloadPort;
-export function globalPreload({port}) {
- // Save the communication port to the application context to send messages
- // to it later
- preloadPort = port;
- // Every time the application context sends a message over the port
- port.on('message', onPreloadPortMessage);
- // This prevents the port that the Loader/application talk over
- // from keeping the process alive, without this, an application would be kept
- // alive just because a loader is waiting for messages
- port.unref();
- const insideAppContext = (getBuiltin, port, setImportMetaCallback) => {
- /**
- * This is the Map that saves *all* the mocked URL -> replacement Module
- * mappings
- * @type {Map}
- */
- let mockedModules = new Map();
- let mockVersion = 0;
- /**
- * This is the value that is placed into the `node:mock` default export
- *
- * @example
- * ```mjs
- * import mock from 'node:mock';
- * const mutator = mock('file:///app.js', {x:1});
- * const namespace = await import('file:///app.js');
- * namespace.x; // 1;
- * mutator.x = 2;
- * namespace.x; // 2;
- * ```
- *
- * @param {string} resolved an absolute URL HREF string
- * @param {object} replacementProperties an object to pick properties from
- * to act as a module namespace
- * @returns {object} a mutator object that can update the module namespace
- * since we can't do something like old Object.observe
- */
- const doMock = (resolved, replacementProperties) => {
- let exportNames = Object.keys(replacementProperties);
- let namespace = Object.create(null);
- /**
- * @type {Array<(name: string)=>void>} functions to call whenever an
- * export name is updated
- */
- let listeners = [];
- for (const name of exportNames) {
- let currentValueForPropertyName = replacementProperties[name];
- Object.defineProperty(namespace, name, {
- enumerable: true,
- get() {
- return currentValueForPropertyName;
- },
- set(v) {
- currentValueForPropertyName = v;
- for (let fn of listeners) {
- try {
- fn(name);
- } catch {
- }
- }
- }
- });
- }
- mockedModules.set(resolved, {
- namespace,
- listeners
- });
- mockVersion++;
- // Inform the loader that the `resolved` URL should now use the specific
- // `mockVersion` and has export names of `exportNames`
- //
- // This allows the loader to generate a fake module for that version
- // and names the next time it resolves a specifier to equal `resolved`
- port.postMessage({ mockVersion, resolved, exports: exportNames });
- return namespace;
- }
- // Sets the import.meta properties up
- // has the normal chaining workflow with `defaultImportMetaInitializer`
- setImportMetaCallback((meta, context, defaultImportMetaInitializer) => {
- /**
- * 'node:mock' creates its default export by plucking off of import.meta
- * and must do so in order to get the communications channel from inside
- * preloadCode
- */
- if (context.url === 'node:mock') {
- meta.doMock = doMock;
- return;
- }
- /**
- * Fake modules created by `node:mock` get their meta.mock utility set
- * to the corresponding value keyed off `mockedModules` and use this
- * to setup their exports/listeners properly
- */
- if (context.url.startsWith('mock-facade:')) {
- let [proto, version, encodedTargetURL] = context.url.split(':');
- let decodedTargetURL = decodeURIComponent(encodedTargetURL);
- if (mockedModules.has(decodedTargetURL)) {
- meta.mock = mockedModules.get(decodedTargetURL);
- return;
- }
- }
- /**
- * Ensure we still get things like `import.meta.url`
- */
- defaultImportMetaInitializer(meta, context);
- });
- };
- return `(${insideAppContext})(getBuiltin, port, setImportMetaCallback)`
+/** @type {URL['href']} */
+let mainImportURL;
+/** @type {MessagePort} */
+let preloadPort;
+export async function initialize(data) {
+ ({ mainImportURL, port: preloadPort } = data);
+
+ data.port.on('message', onPreloadPortMessage);
}
+/**
+ * Because Node.js internals use a separate MessagePort for cross-thread
+ * communication, there could be some messages pending that we should handle
+ * before continuing.
+ */
+function doDrainPort() {
+ let msg;
+ while (msg = receiveMessageOnPort(preloadPort)) {
+ onPreloadPortMessage(msg.message);
+ }
+}
// Rewrites node: loading to mock-facade: so that it can be intercepted
export async function resolve(specifier, context, defaultResolve) {
- if (specifier === 'node:mock') {
- return {
- shortCircuit: true,
- url: specifier
- };
- }
doDrainPort();
const def = await defaultResolve(specifier, context);
if (context.parentURL?.startsWith('mock-facade:')) {
@@ -193,55 +86,46 @@ export async function resolve(specifier, context, defaultResolve) {
export async function load(url, context, defaultLoad) {
doDrainPort();
- if (url === 'node:mock') {
- /**
- * Simply grab the import.meta.doMock to establish the communication
- * channel with preloadCode
- */
- return {
- shortCircuit: true,
- source: 'export default import.meta.doMock',
- format: 'module'
- };
- }
/**
* Mocked fake module, not going to be handled in default way so it
* generates the source text, then short circuits
*/
if (url.startsWith('mock-facade:')) {
- let [proto, version, encodedTargetURL] = url.split(':');
- let ret = generateModule(mockedModuleExports.get(
- decodeURIComponent(encodedTargetURL)
- ));
+ const encodedTargetURL = url.slice(url.lastIndexOf(':') + 1);
return {
shortCircuit: true,
- source: ret,
- format: 'module'
+ source: generateModule(encodedTargetURL),
+ format: 'module',
};
}
return defaultLoad(url, context);
}
/**
- *
- * @param {Array} exports name of the exports of the module
+ * Generate the source code for a mocked module.
+ * @param {string} encodedTargetURL the module being mocked
* @returns {string}
*/
-function generateModule(exports) {
+function generateModule(encodedTargetURL) {
+ const exports = mockedModuleExports.get(
+ decodeURIComponent(encodedTargetURL)
+ );
let body = [
+ `import { mockedModules } from ${JSON.stringify(mainImportURL)};`,
'export {};',
- 'let mapping = {__proto__: null};'
+ 'let mapping = {__proto__: null};',
+ `const mock = mockedModules.get(${JSON.stringify(encodedTargetURL)});`,
];
for (const [i, name] of Object.entries(exports)) {
let key = JSON.stringify(name);
- body.push(`var _${i} = import.meta.mock.namespace[${key}];`);
+ body.push(`var _${i} = mock.namespace[${key}];`);
body.push(`Object.defineProperty(mapping, ${key}, { enumerable: true, set(v) {_${i} = v;}, get() {return _${i};} });`);
body.push(`export {_${i} as ${name}};`);
}
- body.push(`import.meta.mock.listeners.push(${
+ body.push(`mock.listeners.push(${
() => {
for (var k in mapping) {
- mapping[k] = import.meta.mock.namespace[k];
+ mapping[k] = mock.namespace[k];
}
}
});`);
diff --git a/test/fixtures/es-module-loaders/mock.mjs b/test/fixtures/es-module-loaders/mock.mjs
new file mode 100644
index 00000000000000..cb167f1d5204c7
--- /dev/null
+++ b/test/fixtures/es-module-loaders/mock.mjs
@@ -0,0 +1,70 @@
+import { register } from 'node:module';
+import { MessageChannel } from 'node:worker_threads';
+
+
+const { port1, port2 } = new MessageChannel();
+
+register('./mock-loader.mjs', import.meta.url, {
+ data: {
+ port: port2,
+ mainImportURL: import.meta.url,
+ },
+ transferList: [port2],
+});
+
+/**
+ * This is the Map that saves *all* the mocked URL -> replacement Module
+ * mappings
+ * @type {Map}
+ */
+export const mockedModules = new Map();
+let mockVersion = 0;
+
+/**
+ * @param {string} resolved an absolute URL HREF string
+ * @param {object} replacementProperties an object to pick properties from
+ * to act as a module namespace
+ * @returns {object} a mutator object that can update the module namespace
+ * since we can't do something like old Object.observe
+ */
+export function mock(resolved, replacementProperties) {
+ const exportNames = Object.keys(replacementProperties);
+ const namespace = { __proto__: null };
+ /**
+ * @type {Array<(name: string)=>void>} functions to call whenever an
+ * export name is updated
+ */
+ const listeners = [];
+ for (const name of exportNames) {
+ let currentValueForPropertyName = replacementProperties[name];
+ Object.defineProperty(namespace, name, {
+ __proto__: null,
+ enumerable: true,
+ get() {
+ return currentValueForPropertyName;
+ },
+ set(v) {
+ currentValueForPropertyName = v;
+ for (const fn of listeners) {
+ try {
+ fn(name);
+ } catch {
+ /* noop */
+ }
+ }
+ },
+ });
+ }
+ mockedModules.set(encodeURIComponent(resolved), {
+ namespace,
+ listeners,
+ });
+ mockVersion++;
+ // Inform the loader that the `resolved` URL should now use the specific
+ // `mockVersion` and has export names of `exportNames`
+ //
+ // This allows the loader to generate a fake module for that version
+ // and names the next time it resolves a specifier to equal `resolved`
+ port1.postMessage({ mockVersion, resolved, exports: exportNames });
+ return namespace;
+}
diff --git a/test/fixtures/es-modules/invalid-posix-host.mjs b/test/fixtures/es-modules/invalid-posix-host.mjs
new file mode 100644
index 00000000000000..65ebb2c0496c15
--- /dev/null
+++ b/test/fixtures/es-modules/invalid-posix-host.mjs
@@ -0,0 +1 @@
+import "file://hmm.js";
diff --git a/test/fixtures/snapshot/cwd.js b/test/fixtures/snapshot/cwd.js
new file mode 100644
index 00000000000000..4860cc6662af8a
--- /dev/null
+++ b/test/fixtures/snapshot/cwd.js
@@ -0,0 +1,10 @@
+const {
+ setDeserializeMainFunction,
+} = require('v8').startupSnapshot;
+
+// To make sure the cwd is present in the cache
+process.cwd();
+
+setDeserializeMainFunction(() => {
+ console.log(process.cwd());
+});
diff --git a/test/fixtures/test-runner/output/abort.snapshot b/test/fixtures/test-runner/output/abort.snapshot
index ceca09da14bfb1..1b758a2314c486 100644
--- a/test/fixtures/test-runner/output/abort.snapshot
+++ b/test/fixtures/test-runner/output/abort.snapshot
@@ -32,7 +32,7 @@ TAP version 13
# Subtest: not ok 2
not ok 6 - not ok 2
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):7'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
@@ -41,7 +41,7 @@ TAP version 13
# Subtest: not ok 3
not ok 7 - not ok 3
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):7'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -62,7 +62,7 @@ TAP version 13
# Subtest: not ok 4
not ok 8 - not ok 4
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):7'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -83,7 +83,7 @@ TAP version 13
# Subtest: not ok 5
not ok 9 - not ok 5
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):7'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -169,7 +169,7 @@ not ok 2 - promise abort signal
# Subtest: not ok 2
not ok 6 - not ok 2
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):5'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
@@ -178,7 +178,7 @@ not ok 2 - promise abort signal
# Subtest: not ok 3
not ok 7 - not ok 3
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):5'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -199,7 +199,7 @@ not ok 2 - promise abort signal
# Subtest: not ok 4
not ok 8 - not ok 4
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):5'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -220,7 +220,7 @@ not ok 2 - promise abort signal
# Subtest: not ok 5
not ok 9 - not ok 5
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort.js:(LINE):5'
failureType: 'testAborted'
error: 'This operation was aborted'
diff --git a/test/fixtures/test-runner/output/abort_hooks.snapshot b/test/fixtures/test-runner/output/abort_hooks.snapshot
index d0b567bb6a22cd..278b5e5fd36ca5 100644
--- a/test/fixtures/test-runner/output/abort_hooks.snapshot
+++ b/test/fixtures/test-runner/output/abort_hooks.snapshot
@@ -11,7 +11,7 @@ TAP version 13
# Subtest: test 1
not ok 1 - test 1
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
@@ -20,7 +20,7 @@ TAP version 13
# Subtest: test 2
not ok 2 - test 2
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort_hooks.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
diff --git a/test/fixtures/test-runner/output/abort_suite.snapshot b/test/fixtures/test-runner/output/abort_suite.snapshot
index e7e8c4f4e2360f..30d48d236ff4a5 100644
--- a/test/fixtures/test-runner/output/abort_suite.snapshot
+++ b/test/fixtures/test-runner/output/abort_suite.snapshot
@@ -32,7 +32,7 @@ TAP version 13
# Subtest: not ok 2
not ok 6 - not ok 2
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
@@ -41,7 +41,7 @@ TAP version 13
# Subtest: not ok 3
not ok 7 - not ok 3
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -62,7 +62,7 @@ TAP version 13
# Subtest: not ok 4
not ok 8 - not ok 4
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3'
failureType: 'testAborted'
error: 'This operation was aborted'
@@ -83,7 +83,7 @@ TAP version 13
# Subtest: not ok 5
not ok 9 - not ok 5
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/abort_suite.js:(LINE):3'
failureType: 'testAborted'
error: 'This operation was aborted'
diff --git a/test/fixtures/test-runner/output/arbitrary-output.snapshot b/test/fixtures/test-runner/output/arbitrary-output.snapshot
index 2389096398cd09..601aaa42f3c74a 100644
--- a/test/fixtures/test-runner/output/arbitrary-output.snapshot
+++ b/test/fixtures/test-runner/output/arbitrary-output.snapshot
@@ -1,17 +1,17 @@
TAP version 13
ok 1 - test
---
- duration_ms: ZERO
+ duration_ms: *
...
# arbitrary - pre
ok 2 - test
---
- duration_ms: ZERO
+ duration_ms: *
...
# arbitrary - mid
ok 3 - test
---
- duration_ms: ZERO
+ duration_ms: *
...
# arbitrary - post
1..3
diff --git a/test/fixtures/test-runner/output/coverage_failure.js b/test/fixtures/test-runner/output/coverage_failure.js
new file mode 100644
index 00000000000000..6c4d25ce081cad
--- /dev/null
+++ b/test/fixtures/test-runner/output/coverage_failure.js
@@ -0,0 +1,13 @@
+// Flags: --expose-internals --experimental-test-coverage
+
+'use strict';
+require('../../../common');
+const { TestCoverage } = require('internal/test_runner/coverage');
+const { test, mock } = require('node:test');
+
+mock.method(TestCoverage.prototype, 'summary', () => {
+ throw new Error('Failed to collect coverage');
+});
+
+test('ok');
+
diff --git a/test/fixtures/test-runner/output/coverage_failure.snapshot b/test/fixtures/test-runner/output/coverage_failure.snapshot
new file mode 100644
index 00000000000000..62f39ebede943a
--- /dev/null
+++ b/test/fixtures/test-runner/output/coverage_failure.snapshot
@@ -0,0 +1,16 @@
+TAP version 13
+# Subtest: ok
+ok 1 - ok
+ ---
+ duration_ms: *
+ ...
+1..1
+# Warning: Could not report code coverage. Error: Failed to collect coverage
+# tests 1
+# suites 0
+# pass 1
+# fail 0
+# cancelled 0
+# skipped 0
+# todo 0
+# duration_ms *
diff --git a/test/fixtures/test-runner/output/describe_it.snapshot b/test/fixtures/test-runner/output/describe_it.snapshot
index be345f11575c8d..1d4f7853ead0d1 100644
--- a/test/fixtures/test-runner/output/describe_it.snapshot
+++ b/test/fixtures/test-runner/output/describe_it.snapshot
@@ -513,7 +513,7 @@ not ok 51 - subtest sync throw fails
# Subtest: should not run
not ok 1 - should not run
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
@@ -544,7 +544,7 @@ not ok 52 - describe sync throw fails
# Subtest: should not run
not ok 1 - should not run
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/describe_it.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
diff --git a/test/fixtures/test-runner/output/hooks.snapshot b/test/fixtures/test-runner/output/hooks.snapshot
index 5afe398ed3d0ea..6cf29612c535cb 100644
--- a/test/fixtures/test-runner/output/hooks.snapshot
+++ b/test/fixtures/test-runner/output/hooks.snapshot
@@ -37,7 +37,7 @@ ok 1 - describe hooks
# Subtest: 1
not ok 1 - 1
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
@@ -46,7 +46,7 @@ ok 1 - describe hooks
# Subtest: 2
not ok 2 - 2
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/hooks.js:(LINE):3'
failureType: 'cancelledByParent'
error: 'test did not finish before its parent and was cancelled'
diff --git a/test/fixtures/test-runner/output/junit_reporter.js b/test/fixtures/test-runner/output/junit_reporter.js
new file mode 100644
index 00000000000000..1f49b3f6042d97
--- /dev/null
+++ b/test/fixtures/test-runner/output/junit_reporter.js
@@ -0,0 +1,7 @@
+'use strict';
+require('../../../common');
+const fixtures = require('../../../common/fixtures');
+const spawn = require('node:child_process').spawn;
+
+spawn(process.execPath,
+ ['--no-warnings', '--test-reporter', 'junit', fixtures.path('test-runner/output/output.js')], { stdio: 'inherit' });
diff --git a/test/fixtures/test-runner/output/junit_reporter.snapshot b/test/fixtures/test-runner/output/junit_reporter.snapshot
new file mode 100644
index 00000000000000..6516387e7ed582
--- /dev/null
+++ b/test/fixtures/test-runner/output/junit_reporter.snapshot
@@ -0,0 +1,488 @@
+
+
+
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: thrown from sync fail todo] {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from sync fail todo
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: thrown from sync fail todo with message] {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from sync fail todo with message
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: thrown from sync throw fail] {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from sync throw fail
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: thrown from async throw fail] {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from async throw fail
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: thrown from async throw fail] {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from async throw fail
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+[Error [ERR_TEST_FAILURE]: Expected values to be strictly equal:
+
+true !== false
+] {
+ failureType: 'testCodeFailure',
+ cause: AssertionError [ERR_ASSERTION]: Expected values to be strictly equal:
+
+ true !== false
+
+ *
+ *
+ *
+ *
+ *
+ *
+ * {
+ generatedMessage: true,
+ code: 'ERR_ASSERTION',
+ actual: true,
+ expected: false,
+ operator: 'strictEqual'
+ },
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: rejected from reject fail] {
+ failureType: 'testCodeFailure',
+ cause: Error: rejected from reject fail
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+
+
+
+Error [ERR_TEST_FAILURE]: thrown from subtest sync throw fail
+ * {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from subtest sync throw fail
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ at Test.postRun (node:internal/test_runner/test:715:19),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: Symbol(thrown symbol from sync throw non-error fail)] { failureType: 'testCodeFailure', cause: Symbol(thrown symbol from sync throw non-error fail), code: 'ERR_TEST_FAILURE' }
+
+
+
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: test did not finish before its parent and was cancelled] { failureType: 'cancelledByParent', cause: 'test did not finish before its parent and was cancelled', code: 'ERR_TEST_FAILURE' }
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: this should be executed] {
+ failureType: 'testCodeFailure',
+ cause: Error: this should be executed
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: callback failure] {
+ failureType: 'testCodeFailure',
+ cause: Error: callback failure
+ *
+ at process.processImmediate (node:internal/timers:478:21),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: passed a callback but also returned a Promise] { failureType: 'callbackAndPromisePresent', cause: 'passed a callback but also returned a Promise', code: 'ERR_TEST_FAILURE' }
+
+
+
+
+[Error [ERR_TEST_FAILURE]: thrown from callback throw] {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from callback throw
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.processPendingSubtests (node:internal/test_runner/test:374:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+Error [ERR_TEST_FAILURE]: callback invoked multiple times
+ *
+ * {
+ failureType: 'multipleCallbackInvocations',
+ cause: 'callback invoked multiple times',
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+Error [ERR_TEST_FAILURE]: callback invoked multiple times
+ * {
+ failureType: 'uncaughtException',
+ cause: Error [ERR_TEST_FAILURE]: callback invoked multiple times
+ * {
+ failureType: 'multipleCallbackInvocations',
+ cause: 'callback invoked multiple times',
+ code: 'ERR_TEST_FAILURE'
+ },
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+Error [ERR_TEST_FAILURE]: thrown from callback async throw
+ * {
+ failureType: 'uncaughtException',
+ cause: Error: thrown from callback async throw
+ *
+ at process.processImmediate (node:internal/timers:478:21),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: customized] { failureType: 'testCodeFailure', cause: customized, code: 'ERR_TEST_FAILURE' }
+
+
+
+
+[Error [ERR_TEST_FAILURE]: {
+ foo: 1,
+ [Symbol(nodejs.util.inspect.custom)]: [Function: [nodejs.util.inspect.custom]]
+}] {
+ failureType: 'testCodeFailure',
+ cause: { foo: 1, [Symbol(nodejs.util.inspect.custom)]: [Function: [nodejs.util.inspect.custom]] },
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+Error [ERR_TEST_FAILURE]: thrown from subtest sync throw fails at first
+ * {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from subtest sync throw fails at first
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ at Test.postRun (node:internal/test_runner/test:715:19),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+Error [ERR_TEST_FAILURE]: thrown from subtest sync throw fails at second
+ * {
+ failureType: 'testCodeFailure',
+ cause: Error: thrown from subtest sync throw fails at second
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ *
+ at async Test.run (node:internal/test_runner/test:632:9),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: test timed out after 5ms] { failureType: 'testTimeoutFailure', cause: 'test timed out after 5ms', code: 'ERR_TEST_FAILURE' }
+
+
+
+
+[Error [ERR_TEST_FAILURE]: test timed out after 5ms] { failureType: 'testTimeoutFailure', cause: 'test timed out after 5ms', code: 'ERR_TEST_FAILURE' }
+
+
+
+
+
+
+
+[Error [ERR_TEST_FAILURE]: custom error] { failureType: 'testCodeFailure', cause: 'custom error', code: 'ERR_TEST_FAILURE' }
+
+
+
+
+Error [ERR_TEST_FAILURE]: foo
+ * {
+ failureType: 'uncaughtException',
+ cause: Error: foo
+ *
+ *
+ at process.processTimers (node:internal/timers:514:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+Error [ERR_TEST_FAILURE]: bar
+ * {
+ failureType: 'unhandledRejection',
+ cause: Error: bar
+ *
+ *
+ at process.processTimers (node:internal/timers:514:7),
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+[Error [ERR_TEST_FAILURE]: Expected values to be loosely deep-equal:
+
+{
+ bar: 1,
+ foo: 1
+}
+
+should loosely deep-equal
+
+<ref *1> {
+ bar: 2,
+ c: [Circular *1]
+}] {
+ failureType: 'testCodeFailure',
+ cause: AssertionError [ERR_ASSERTION]: Expected values to be loosely deep-equal:
+
+ {
+ bar: 1,
+ foo: 1
+ }
+
+ should loosely deep-equal
+
+ <ref *1> {
+ bar: 2,
+ c: [Circular *1]
+ }
+ * {
+ generatedMessage: true,
+ code: 'ERR_ASSERTION',
+ actual: [Object],
+ expected: [Object],
+ operator: 'deepEqual'
+ },
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+Error [ERR_TEST_FAILURE]: test could not be started because its parent finished
+ * {
+ failureType: 'parentAlreadyFinished',
+ cause: 'test could not be started because its parent finished',
+ code: 'ERR_TEST_FAILURE'
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/test/fixtures/test-runner/output/unresolved_promise.snapshot b/test/fixtures/test-runner/output/unresolved_promise.snapshot
index 839ec311a65e04..0090885468c338 100644
--- a/test/fixtures/test-runner/output/unresolved_promise.snapshot
+++ b/test/fixtures/test-runner/output/unresolved_promise.snapshot
@@ -18,7 +18,7 @@ not ok 2 - never resolving promise
# Subtest: fail
not ok 3 - fail
---
- duration_ms: ZERO
+ duration_ms: *
location: '/test/fixtures/test-runner/output/unresolved_promise.js:(LINE):1'
failureType: 'cancelledByParent'
error: 'Promise resolution is still pending but the event loop has already resolved'
diff --git a/test/fixtures/test-runner/test_only.js b/test/fixtures/test-runner/test_only.js
new file mode 100644
index 00000000000000..efc79b9dfadca6
--- /dev/null
+++ b/test/fixtures/test-runner/test_only.js
@@ -0,0 +1,5 @@
+'use strict';
+const test = require('node:test');
+
+test('this should be skipped');
+test.only('this should be executed');
diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md
index 890c8f9e1dbe3b..c8210edb569c17 100644
--- a/test/fixtures/wpt/README.md
+++ b/test/fixtures/wpt/README.md
@@ -31,7 +31,7 @@ Last update:
- user-timing: https://github.com/web-platform-tests/wpt/tree/5ae85bf826/user-timing
- wasm/jsapi: https://github.com/web-platform-tests/wpt/tree/cde25e7e3c/wasm/jsapi
- wasm/webapi: https://github.com/web-platform-tests/wpt/tree/fd1b23eeaa/wasm/webapi
-- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/17b7ca10fd/WebCryptoAPI
+- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/f4e7e32fd0/WebCryptoAPI
- webidl/ecmascript-binding/es-exceptions: https://github.com/web-platform-tests/wpt/tree/a370aad338/webidl/ecmascript-binding/es-exceptions
- webmessaging/broadcastchannel: https://github.com/web-platform-tests/wpt/tree/e97fac4791/webmessaging/broadcastchannel
diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js
index 25defa369c1d31..31f062e313f6fe 100644
--- a/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js
+++ b/test/fixtures/wpt/WebCryptoAPI/import_export/ec_importKey.https.any.js
@@ -71,7 +71,7 @@
[true, false].forEach(function(extractable) {
// Test public keys first
- [[]].forEach(function(usages) { // Only valid usages argument is empty array
+ allValidUsages(vector.publicUsages, true).forEach(function(usages) {
['spki', 'spki_compressed', 'jwk', 'raw', 'raw_compressed'].forEach(function(format) {
var algorithm = {name: vector.name, namedCurve: curve};
var data = keyData[curve];
@@ -88,7 +88,7 @@
['pkcs8', 'jwk'].forEach(function(format) {
var algorithm = {name: vector.name, namedCurve: curve};
var data = keyData[curve];
- allValidUsages(vector.privateUsages, []).forEach(function(usages) {
+ allValidUsages(vector.privateUsages).forEach(function(usages) {
testFormat(format, algorithm, data, curve, usages, extractable);
});
testEmptyUsages(format, algorithm, data, curve, extractable);
@@ -219,46 +219,6 @@
return base64String.replace(/=/g, "");
}
- // Want to test every valid combination of usages. Start by creating a list
- // of all non-empty subsets to possible usages.
- function allNonemptySubsetsOf(arr) {
- var results = [];
- var firstElement;
- var remainingElements;
-
- for(var i=0; i 0) {
- allNonemptySubsetsOf(remainingElements).forEach(function(combination) {
- combination.push(firstElement);
- results.push(combination);
- });
- }
- }
-
- return results;
- }
-
- // Return a list of all valid usage combinations, given the possible ones
- // and the ones that are required for a particular operation.
- function allValidUsages(possibleUsages, requiredUsages) {
- var allUsages = [];
-
- allNonemptySubsetsOf(possibleUsages).forEach(function(usage) {
- for (var i=0; i 0) {
- allNonemptySubsetsOf(remainingElements).forEach(function(combination) {
- combination.push(firstElement);
- results.push(combination);
- });
- }
- }
-
- return results;
- }
-
- // Return a list of all valid usage combinations, given the possible ones
- // and the ones that are required for a particular operation.
- function allValidUsages(possibleUsages, requiredUsages) {
- var allUsages = [];
-
- allNonemptySubsetsOf(possibleUsages).forEach(function(usage) {
- for (var i=0; i 0) {
- allNonemptySubsetsOf(remainingElements).forEach(function(combination) {
- combination.push(firstElement);
- results.push(combination);
- });
- }
- }
-
- return results;
- }
-
- // Return a list of all valid usage combinations, given the possible ones
- // and the ones that are required for a particular operation.
- function allValidUsages(possibleUsages, requiredUsages) {
- var allUsages = [];
-
- allNonemptySubsetsOf(possibleUsages).forEach(function(usage) {
- for (var i=0; i 0) {
- allNonemptySubsetsOf(remainingElements).forEach(function(combination) {
- combination.push(firstElement);
- results.push(combination);
- });
- }
- }
-
- return results;
- }
-
- // Return a list of all valid usage combinations, given the possible ones
- // and the ones that are required for a particular operation.
- function allValidUsages(possibleUsages, requiredUsages) {
- var allUsages = [];
-
- allNonemptySubsetsOf(possibleUsages).forEach(function(usage) {
- for (var i=0; i
#include "../common.h"
+#include "../entry_point.h"
static napi_value Add(napi_env env, napi_callback_info info) {
size_t argc = 2;
diff --git a/test/js-native-api/2_function_arguments/binding.gyp b/test/js-native-api/2_function_arguments/binding.gyp
index 7e35a4c9d6dc05..77836418d4736e 100644
--- a/test/js-native-api/2_function_arguments/binding.gyp
+++ b/test/js-native-api/2_function_arguments/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "2_function_arguments",
"sources": [
- "../entry_point.c",
"2_function_arguments.c"
]
}
diff --git a/test/js-native-api/3_callbacks/3_callbacks.c b/test/js-native-api/3_callbacks/3_callbacks.c
index 3be18daff1d7a6..44bd2455749145 100644
--- a/test/js-native-api/3_callbacks/3_callbacks.c
+++ b/test/js-native-api/3_callbacks/3_callbacks.c
@@ -1,6 +1,7 @@
#include
-#include "../common.h"
#include
+#include "../common.h"
+#include "../entry_point.h"
static napi_value RunCallback(napi_env env, napi_callback_info info) {
size_t argc = 2;
diff --git a/test/js-native-api/3_callbacks/binding.gyp b/test/js-native-api/3_callbacks/binding.gyp
index 3cc662c4076dc1..0b3e2eb96cd903 100644
--- a/test/js-native-api/3_callbacks/binding.gyp
+++ b/test/js-native-api/3_callbacks/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "3_callbacks",
"sources": [
- "../entry_point.c",
"3_callbacks.c"
]
}
diff --git a/test/js-native-api/4_object_factory/4_object_factory.c b/test/js-native-api/4_object_factory/4_object_factory.c
index 5b06517744dd3e..8fd6090f22a37e 100644
--- a/test/js-native-api/4_object_factory/4_object_factory.c
+++ b/test/js-native-api/4_object_factory/4_object_factory.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value CreateObject(napi_env env, napi_callback_info info) {
size_t argc = 1;
diff --git a/test/js-native-api/4_object_factory/binding.gyp b/test/js-native-api/4_object_factory/binding.gyp
index 6cb3a9fa68b48a..c1f2aca1498346 100644
--- a/test/js-native-api/4_object_factory/binding.gyp
+++ b/test/js-native-api/4_object_factory/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "4_object_factory",
"sources": [
- "../entry_point.c",
"4_object_factory.c"
]
}
diff --git a/test/js-native-api/5_function_factory/5_function_factory.c b/test/js-native-api/5_function_factory/5_function_factory.c
index 679f09fee9e49e..8c2bdac5bd5f94 100644
--- a/test/js-native-api/5_function_factory/5_function_factory.c
+++ b/test/js-native-api/5_function_factory/5_function_factory.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value MyFunction(napi_env env, napi_callback_info info) {
napi_value str;
diff --git a/test/js-native-api/5_function_factory/binding.gyp b/test/js-native-api/5_function_factory/binding.gyp
index c621c29f185cab..183332d3441112 100644
--- a/test/js-native-api/5_function_factory/binding.gyp
+++ b/test/js-native-api/5_function_factory/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "5_function_factory",
"sources": [
- "../entry_point.c",
"5_function_factory.c"
]
}
diff --git a/test/js-native-api/6_object_wrap/6_object_wrap.cc b/test/js-native-api/6_object_wrap/6_object_wrap.cc
index 7ebe711a6dccf1..49b1241fb38caa 100644
--- a/test/js-native-api/6_object_wrap/6_object_wrap.cc
+++ b/test/js-native-api/6_object_wrap/6_object_wrap.cc
@@ -1,4 +1,5 @@
#include "../common.h"
+#include "../entry_point.h"
#include "assert.h"
#include "myobject.h"
diff --git a/test/js-native-api/6_object_wrap/binding.gyp b/test/js-native-api/6_object_wrap/binding.gyp
index 2807d6a1572529..44c9c3f837b4a6 100644
--- a/test/js-native-api/6_object_wrap/binding.gyp
+++ b/test/js-native-api/6_object_wrap/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "6_object_wrap",
"sources": [
- "../entry_point.c",
"6_object_wrap.cc"
]
}
diff --git a/test/js-native-api/7_factory_wrap/7_factory_wrap.cc b/test/js-native-api/7_factory_wrap/7_factory_wrap.cc
index b1dbd8eee4945f..5fb7a6670d74d8 100644
--- a/test/js-native-api/7_factory_wrap/7_factory_wrap.cc
+++ b/test/js-native-api/7_factory_wrap/7_factory_wrap.cc
@@ -1,6 +1,7 @@
#include
-#include "myobject.h"
#include "../common.h"
+#include "../entry_point.h"
+#include "myobject.h"
napi_value CreateObject(napi_env env, napi_callback_info info) {
size_t argc = 1;
diff --git a/test/js-native-api/7_factory_wrap/binding.gyp b/test/js-native-api/7_factory_wrap/binding.gyp
index f9096674a70b5c..bb7c8aab1826a2 100644
--- a/test/js-native-api/7_factory_wrap/binding.gyp
+++ b/test/js-native-api/7_factory_wrap/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "7_factory_wrap",
"sources": [
- "../entry_point.c",
"7_factory_wrap.cc",
"myobject.cc"
]
diff --git a/test/js-native-api/8_passing_wrapped/8_passing_wrapped.cc b/test/js-native-api/8_passing_wrapped/8_passing_wrapped.cc
index 5b3b7909582e21..1a3e6d1072045b 100644
--- a/test/js-native-api/8_passing_wrapped/8_passing_wrapped.cc
+++ b/test/js-native-api/8_passing_wrapped/8_passing_wrapped.cc
@@ -1,6 +1,7 @@
#include
-#include "myobject.h"
#include "../common.h"
+#include "../entry_point.h"
+#include "myobject.h"
extern size_t finalize_count;
diff --git a/test/js-native-api/8_passing_wrapped/binding.gyp b/test/js-native-api/8_passing_wrapped/binding.gyp
index f85cc4cc97ae45..206d106e52cf94 100644
--- a/test/js-native-api/8_passing_wrapped/binding.gyp
+++ b/test/js-native-api/8_passing_wrapped/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "8_passing_wrapped",
"sources": [
- "../entry_point.c",
"8_passing_wrapped.cc",
"myobject.cc"
]
diff --git a/test/js-native-api/common-inl.h b/test/js-native-api/common-inl.h
new file mode 100644
index 00000000000000..d4db4a3e58bdc6
--- /dev/null
+++ b/test/js-native-api/common-inl.h
@@ -0,0 +1,56 @@
+#ifndef JS_NATIVE_API_COMMON_INL_H_
+#define JS_NATIVE_API_COMMON_INL_H_
+
+#include
+#include "common.h"
+
+#include
+
+inline void add_returned_status(napi_env env,
+ const char* key,
+ napi_value object,
+ char* expected_message,
+ napi_status expected_status,
+ napi_status actual_status) {
+ char napi_message_string[100] = "";
+ napi_value prop_value;
+
+ if (actual_status != expected_status) {
+ snprintf(napi_message_string,
+ sizeof(napi_message_string),
+ "Invalid status [%d]",
+ actual_status);
+ }
+
+ NODE_API_CALL_RETURN_VOID(
+ env,
+ napi_create_string_utf8(
+ env,
+ (actual_status == expected_status ? expected_message
+ : napi_message_string),
+ NAPI_AUTO_LENGTH,
+ &prop_value));
+ NODE_API_CALL_RETURN_VOID(
+ env, napi_set_named_property(env, object, key, prop_value));
+}
+
+inline void add_last_status(napi_env env,
+ const char* key,
+ napi_value return_value) {
+ napi_value prop_value;
+ const napi_extended_error_info* p_last_error;
+ NODE_API_CALL_RETURN_VOID(env, napi_get_last_error_info(env, &p_last_error));
+
+ NODE_API_CALL_RETURN_VOID(
+ env,
+ napi_create_string_utf8(
+ env,
+ (p_last_error->error_message == NULL ? "napi_ok"
+ : p_last_error->error_message),
+ NAPI_AUTO_LENGTH,
+ &prop_value));
+ NODE_API_CALL_RETURN_VOID(
+ env, napi_set_named_property(env, return_value, key, prop_value));
+}
+
+#endif // JS_NATIVE_API_COMMON_INL_H_
diff --git a/test/js-native-api/common.c b/test/js-native-api/common.c
deleted file mode 100644
index 865d2064bdef85..00000000000000
--- a/test/js-native-api/common.c
+++ /dev/null
@@ -1,48 +0,0 @@
-#include
-#include "common.h"
-
-#include
-
-void add_returned_status(napi_env env,
- const char* key,
- napi_value object,
- char* expected_message,
- napi_status expected_status,
- napi_status actual_status) {
-
- char napi_message_string[100] = "";
- napi_value prop_value;
-
- if (actual_status != expected_status) {
- snprintf(napi_message_string, sizeof(napi_message_string),
- "Invalid status [%d]", actual_status);
- }
-
- NODE_API_CALL_RETURN_VOID(env,
- napi_create_string_utf8(
- env,
- (actual_status == expected_status ?
- expected_message :
- napi_message_string),
- NAPI_AUTO_LENGTH,
- &prop_value));
- NODE_API_CALL_RETURN_VOID(env,
- napi_set_named_property(env, object, key, prop_value));
-}
-
-void add_last_status(napi_env env, const char* key, napi_value return_value) {
- napi_value prop_value;
- const napi_extended_error_info* p_last_error;
- NODE_API_CALL_RETURN_VOID(env,
- napi_get_last_error_info(env, &p_last_error));
-
- NODE_API_CALL_RETURN_VOID(env,
- napi_create_string_utf8(env,
- (p_last_error->error_message == NULL ?
- "napi_ok" :
- p_last_error->error_message),
- NAPI_AUTO_LENGTH,
- &prop_value));
- NODE_API_CALL_RETURN_VOID(env,
- napi_set_named_property(env, return_value, key, prop_value));
-}
diff --git a/test/js-native-api/common.h b/test/js-native-api/common.h
index 25b26fb09137c5..fc6f4cb6c2e1db 100644
--- a/test/js-native-api/common.h
+++ b/test/js-native-api/common.h
@@ -1,3 +1,6 @@
+#ifndef JS_NATIVE_API_COMMON_H_
+#define JS_NATIVE_API_COMMON_H_
+
#include
// Empty value so that macros here are able to return NULL or void
@@ -76,11 +79,17 @@
#define DECLARE_NODE_API_PROPERTY_VALUE(name, value) \
{ (name), NULL, NULL, NULL, NULL, (value), napi_default, NULL }
-void add_returned_status(napi_env env,
- const char* key,
- napi_value object,
- char* expected_message,
- napi_status expected_status,
- napi_status actual_status);
+static inline void add_returned_status(napi_env env,
+ const char* key,
+ napi_value object,
+ char* expected_message,
+ napi_status expected_status,
+ napi_status actual_status);
+
+static inline void add_last_status(napi_env env,
+ const char* key,
+ napi_value return_value);
+
+#include "common-inl.h"
-void add_last_status(napi_env env, const char* key, napi_value return_value);
+#endif // JS_NATIVE_API_COMMON_H_
diff --git a/test/js-native-api/entry_point.c b/test/js-native-api/entry_point.h
similarity index 55%
rename from test/js-native-api/entry_point.c
rename to test/js-native-api/entry_point.h
index 6b7b50a38c9535..5ba5aaffa62312 100644
--- a/test/js-native-api/entry_point.c
+++ b/test/js-native-api/entry_point.h
@@ -1,3 +1,6 @@
+#ifndef JS_NATIVE_API_ENTRY_POINT_H_
+#define JS_NATIVE_API_ENTRY_POINT_H_
+
#include
EXTERN_C_START
@@ -5,3 +8,5 @@ napi_value Init(napi_env env, napi_value exports);
EXTERN_C_END
NAPI_MODULE(NODE_GYP_MODULE_NAME, Init)
+
+#endif // JS_NATIVE_API_ENTRY_POINT_H_
diff --git a/test/js-native-api/test_array/binding.gyp b/test/js-native-api/test_array/binding.gyp
index feb6bd37d88b28..ba19b16e397ad8 100644
--- a/test/js-native-api/test_array/binding.gyp
+++ b/test/js-native-api/test_array/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_array",
"sources": [
- "../entry_point.c",
"test_array.c"
]
}
diff --git a/test/js-native-api/test_array/test_array.c b/test/js-native-api/test_array/test_array.c
index 846755a97b7059..a4515025fc217c 100644
--- a/test/js-native-api/test_array/test_array.c
+++ b/test/js-native-api/test_array/test_array.c
@@ -1,6 +1,7 @@
#include
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value TestGetElement(napi_env env, napi_callback_info info) {
size_t argc = 2;
diff --git a/test/js-native-api/test_bigint/binding.gyp b/test/js-native-api/test_bigint/binding.gyp
index 84db32bf3ea131..6ef04b4394ae8a 100644
--- a/test/js-native-api/test_bigint/binding.gyp
+++ b/test/js-native-api/test_bigint/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_bigint",
"sources": [
- "../entry_point.c",
"test_bigint.c"
]
}
diff --git a/test/js-native-api/test_bigint/test_bigint.c b/test/js-native-api/test_bigint/test_bigint.c
index c63c2f7fe29b44..2c61e0b217ecb4 100644
--- a/test/js-native-api/test_bigint/test_bigint.c
+++ b/test/js-native-api/test_bigint/test_bigint.c
@@ -1,8 +1,9 @@
-#include
#include
-#include
#include
+#include
+#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value IsLossless(napi_env env, napi_callback_info info) {
size_t argc = 2;
diff --git a/test/js-native-api/test_cannot_run_js/binding.gyp b/test/js-native-api/test_cannot_run_js/binding.gyp
index 210417b47b17ec..0b827ff34d129f 100644
--- a/test/js-native-api/test_cannot_run_js/binding.gyp
+++ b/test/js-native-api/test_cannot_run_js/binding.gyp
@@ -1,32 +1,18 @@
{
"targets": [
- {
- "target_name": "copy_entry_point",
- "type": "none",
- "copies": [
- {
- "destination": ".",
- "files": [ "../entry_point.c" ]
- }
- ]
- },
{
"target_name": "test_cannot_run_js",
"sources": [
- "entry_point.c",
"test_cannot_run_js.c"
],
"defines": [ "NAPI_EXPERIMENTAL" ],
- "dependencies": [ "copy_entry_point" ],
},
{
"target_name": "test_pending_exception",
"sources": [
- "entry_point.c",
"test_cannot_run_js.c"
],
"defines": [ "NAPI_VERSION=8" ],
- "dependencies": [ "copy_entry_point" ],
}
]
}
diff --git a/test/js-native-api/test_cannot_run_js/test_cannot_run_js.c b/test/js-native-api/test_cannot_run_js/test_cannot_run_js.c
index 6f2a6e71b8dc22..2cd25823c924c0 100644
--- a/test/js-native-api/test_cannot_run_js/test_cannot_run_js.c
+++ b/test/js-native-api/test_cannot_run_js/test_cannot_run_js.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
#include "stdlib.h"
static void Finalize(napi_env env, void* data, void* hint) {
diff --git a/test/js-native-api/test_constructor/binding.gyp b/test/js-native-api/test_constructor/binding.gyp
index 019114f64651c6..d796a9dbf1cf44 100644
--- a/test/js-native-api/test_constructor/binding.gyp
+++ b/test/js-native-api/test_constructor/binding.gyp
@@ -3,8 +3,6 @@
{
"target_name": "test_constructor",
"sources": [
- "../common.c",
- "../entry_point.c",
"test_constructor.c",
"test_null.c",
]
diff --git a/test/js-native-api/test_constructor/test_constructor.c b/test/js-native-api/test_constructor/test_constructor.c
index 92b03985513d36..c706170bac4a8c 100644
--- a/test/js-native-api/test_constructor/test_constructor.c
+++ b/test/js-native-api/test_constructor/test_constructor.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
#include "test_null.h"
static double value_ = 1;
diff --git a/test/js-native-api/test_conversions/binding.gyp b/test/js-native-api/test_conversions/binding.gyp
index f1640c6638e41e..c286c3fd029203 100644
--- a/test/js-native-api/test_conversions/binding.gyp
+++ b/test/js-native-api/test_conversions/binding.gyp
@@ -3,8 +3,6 @@
{
"target_name": "test_conversions",
"sources": [
- "../entry_point.c",
- "../common.c",
"test_conversions.c",
"test_null.c",
]
diff --git a/test/js-native-api/test_conversions/test_conversions.c b/test/js-native-api/test_conversions/test_conversions.c
index 500962d5144e0f..89b93ef0112513 100644
--- a/test/js-native-api/test_conversions/test_conversions.c
+++ b/test/js-native-api/test_conversions/test_conversions.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
#include "test_null.h"
static napi_value AsBool(napi_env env, napi_callback_info info) {
diff --git a/test/js-native-api/test_dataview/binding.gyp b/test/js-native-api/test_dataview/binding.gyp
index b8d641f5c0f34e..64235390812d79 100644
--- a/test/js-native-api/test_dataview/binding.gyp
+++ b/test/js-native-api/test_dataview/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_dataview",
"sources": [
- "../entry_point.c",
"test_dataview.c"
]
}
diff --git a/test/js-native-api/test_dataview/test_dataview.c b/test/js-native-api/test_dataview/test_dataview.c
index c614a79818cb85..9f62b734c6a9ef 100644
--- a/test/js-native-api/test_dataview/test_dataview.c
+++ b/test/js-native-api/test_dataview/test_dataview.c
@@ -1,6 +1,7 @@
#include
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value CreateDataView(napi_env env, napi_callback_info info) {
size_t argc = 3;
diff --git a/test/js-native-api/test_date/binding.gyp b/test/js-native-api/test_date/binding.gyp
index a65a4e1387235a..6039d122c7649a 100644
--- a/test/js-native-api/test_date/binding.gyp
+++ b/test/js-native-api/test_date/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_date",
"sources": [
- "../entry_point.c",
"test_date.c"
]
}
diff --git a/test/js-native-api/test_date/test_date.c b/test/js-native-api/test_date/test_date.c
index d5e9c778a9cd8f..ef87d6da350d02 100644
--- a/test/js-native-api/test_date/test_date.c
+++ b/test/js-native-api/test_date/test_date.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value createDate(napi_env env, napi_callback_info info) {
size_t argc = 1;
diff --git a/test/js-native-api/test_error/binding.gyp b/test/js-native-api/test_error/binding.gyp
index 617ececb89fcb5..46382427fe669c 100644
--- a/test/js-native-api/test_error/binding.gyp
+++ b/test/js-native-api/test_error/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_error",
"sources": [
- "../entry_point.c",
"test_error.c"
]
}
diff --git a/test/js-native-api/test_error/test_error.c b/test/js-native-api/test_error/test_error.c
index 43e98921efadb0..f34798263dba3e 100644
--- a/test/js-native-api/test_error/test_error.c
+++ b/test/js-native-api/test_error/test_error.c
@@ -1,6 +1,7 @@
#define NAPI_VERSION 9
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value checkError(napi_env env, napi_callback_info info) {
size_t argc = 1;
diff --git a/test/js-native-api/test_exception/binding.gyp b/test/js-native-api/test_exception/binding.gyp
index 4844346a139338..e98a564a10feac 100644
--- a/test/js-native-api/test_exception/binding.gyp
+++ b/test/js-native-api/test_exception/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_exception",
"sources": [
- "../entry_point.c",
"test_exception.c"
]
}
diff --git a/test/js-native-api/test_exception/test_exception.c b/test/js-native-api/test_exception/test_exception.c
index 053f048466d930..84b991961ae136 100644
--- a/test/js-native-api/test_exception/test_exception.c
+++ b/test/js-native-api/test_exception/test_exception.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
static bool exceptionWasPending = false;
static int num = 0x23432;
diff --git a/test/js-native-api/test_function/binding.gyp b/test/js-native-api/test_function/binding.gyp
index df70facefc00cb..7ea9400c351b88 100644
--- a/test/js-native-api/test_function/binding.gyp
+++ b/test/js-native-api/test_function/binding.gyp
@@ -3,8 +3,6 @@
{
"target_name": "test_function",
"sources": [
- "../common.c",
- "../entry_point.c",
"test_function.c"
]
}
diff --git a/test/js-native-api/test_function/test_function.c b/test/js-native-api/test_function/test_function.c
index 107727872a0655..02a2988dc3e265 100644
--- a/test/js-native-api/test_function/test_function.c
+++ b/test/js-native-api/test_function/test_function.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value TestCreateFunctionParameters(napi_env env,
napi_callback_info info) {
diff --git a/test/js-native-api/test_general/binding.gyp b/test/js-native-api/test_general/binding.gyp
index 6a766dc5e40131..577a506f7fad73 100644
--- a/test/js-native-api/test_general/binding.gyp
+++ b/test/js-native-api/test_general/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_general",
"sources": [
- "../entry_point.c",
"test_general.c"
]
}
diff --git a/test/js-native-api/test_general/test_general.c b/test/js-native-api/test_general/test_general.c
index b474ab442cb763..0cd1c54ee142f1 100644
--- a/test/js-native-api/test_general/test_general.c
+++ b/test/js-native-api/test_general/test_general.c
@@ -3,11 +3,12 @@
// not related to any of the other tests
// defined in the file
#define NODE_API_NO_EXTERNAL_BUFFERS_ALLOWED
+#include
+#include
#include
#include
-#include
-#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value testStrictEquals(napi_env env, napi_callback_info info) {
size_t argc = 2;
diff --git a/test/js-native-api/test_handle_scope/binding.gyp b/test/js-native-api/test_handle_scope/binding.gyp
index 842bd5af7444ae..7959c47cb9039e 100644
--- a/test/js-native-api/test_handle_scope/binding.gyp
+++ b/test/js-native-api/test_handle_scope/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_handle_scope",
"sources": [
- "../entry_point.c",
"test_handle_scope.c"
]
}
diff --git a/test/js-native-api/test_handle_scope/test_handle_scope.c b/test/js-native-api/test_handle_scope/test_handle_scope.c
index 681cc04c4f4b68..832ce545d1fabe 100644
--- a/test/js-native-api/test_handle_scope/test_handle_scope.c
+++ b/test/js-native-api/test_handle_scope/test_handle_scope.c
@@ -1,6 +1,7 @@
#include
-#include "../common.h"
#include
+#include "../common.h"
+#include "../entry_point.h"
// these tests validate the handle scope functions in the normal
// flow. Forcing gc behavior to fully validate they are doing
diff --git a/test/js-native-api/test_instance_data/binding.gyp b/test/js-native-api/test_instance_data/binding.gyp
index 5b2d4ff328b4fa..0d55905e9e7236 100644
--- a/test/js-native-api/test_instance_data/binding.gyp
+++ b/test/js-native-api/test_instance_data/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_instance_data",
"sources": [
- "../entry_point.c",
"test_instance_data.c"
]
}
diff --git a/test/js-native-api/test_instance_data/test_instance_data.c b/test/js-native-api/test_instance_data/test_instance_data.c
index 95d41ed5f64994..5e33ddd75d47a5 100644
--- a/test/js-native-api/test_instance_data/test_instance_data.c
+++ b/test/js-native-api/test_instance_data/test_instance_data.c
@@ -1,7 +1,8 @@
+#include
#include
#include
-#include
#include "../common.h"
+#include "../entry_point.h"
typedef struct {
size_t value;
diff --git a/test/js-native-api/test_new_target/binding.gyp b/test/js-native-api/test_new_target/binding.gyp
index f9cc6e83758ced..1afe797d1402b8 100644
--- a/test/js-native-api/test_new_target/binding.gyp
+++ b/test/js-native-api/test_new_target/binding.gyp
@@ -4,7 +4,6 @@
'target_name': 'test_new_target',
'defines': [ 'V8_DEPRECATION_WARNINGS=1' ],
'sources': [
- '../entry_point.c',
'test_new_target.c'
]
}
diff --git a/test/js-native-api/test_new_target/test_new_target.c b/test/js-native-api/test_new_target/test_new_target.c
index d3fe5b0d2d9568..4e2be97419c7f3 100644
--- a/test/js-native-api/test_new_target/test_new_target.c
+++ b/test/js-native-api/test_new_target/test_new_target.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
static napi_value BaseClass(napi_env env, napi_callback_info info) {
napi_value newTargetArg;
diff --git a/test/js-native-api/test_number/binding.gyp b/test/js-native-api/test_number/binding.gyp
index fa65304ba54089..c0a4cb62d9803e 100644
--- a/test/js-native-api/test_number/binding.gyp
+++ b/test/js-native-api/test_number/binding.gyp
@@ -3,8 +3,6 @@
{
"target_name": "test_number",
"sources": [
- "../common.c",
- "../entry_point.c",
"test_number.c",
"test_null.c",
]
diff --git a/test/js-native-api/test_number/test_number.c b/test/js-native-api/test_number/test_number.c
index c8d4733f580a05..b8169451e62ad1 100644
--- a/test/js-native-api/test_number/test_number.c
+++ b/test/js-native-api/test_number/test_number.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
#include "test_null.h"
static napi_value Test(napi_env env, napi_callback_info info) {
diff --git a/test/js-native-api/test_object/binding.gyp b/test/js-native-api/test_object/binding.gyp
index e681f98f73ace6..b81f502584619e 100644
--- a/test/js-native-api/test_object/binding.gyp
+++ b/test/js-native-api/test_object/binding.gyp
@@ -3,8 +3,6 @@
{
"target_name": "test_object",
"sources": [
- "../common.c",
- "../entry_point.c",
"test_null.c",
"test_object.c"
]
diff --git a/test/js-native-api/test_object/test_object.c b/test/js-native-api/test_object/test_object.c
index eb5aa2071e30a3..eddf805187507e 100644
--- a/test/js-native-api/test_object/test_object.c
+++ b/test/js-native-api/test_object/test_object.c
@@ -1,6 +1,7 @@
#include
-#include "../common.h"
#include
+#include "../common.h"
+#include "../entry_point.h"
#include "test_null.h"
static int test_value = 3;
diff --git a/test/js-native-api/test_promise/binding.gyp b/test/js-native-api/test_promise/binding.gyp
index fd777daf5e02cc..de2802f8607dcf 100644
--- a/test/js-native-api/test_promise/binding.gyp
+++ b/test/js-native-api/test_promise/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_promise",
"sources": [
- "../entry_point.c",
"test_promise.c"
]
}
diff --git a/test/js-native-api/test_promise/test_promise.c b/test/js-native-api/test_promise/test_promise.c
index 488ecea7853601..eef4813aa63774 100644
--- a/test/js-native-api/test_promise/test_promise.c
+++ b/test/js-native-api/test_promise/test_promise.c
@@ -1,5 +1,6 @@
#include
#include "../common.h"
+#include "../entry_point.h"
napi_deferred deferred = NULL;
diff --git a/test/js-native-api/test_properties/binding.gyp b/test/js-native-api/test_properties/binding.gyp
index adb6dd5ea151c3..ee38504eea75a6 100644
--- a/test/js-native-api/test_properties/binding.gyp
+++ b/test/js-native-api/test_properties/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_properties",
"sources": [
- "../entry_point.c",
"test_properties.c"
]
}
diff --git a/test/js-native-api/test_properties/test_properties.c b/test/js-native-api/test_properties/test_properties.c
index d822d3628d87fa..567dd8c3a44ecd 100644
--- a/test/js-native-api/test_properties/test_properties.c
+++ b/test/js-native-api/test_properties/test_properties.c
@@ -1,6 +1,7 @@
#define NAPI_VERSION 9
#include
#include "../common.h"
+#include "../entry_point.h"
static double value_ = 1;
diff --git a/test/js-native-api/test_reference/binding.gyp b/test/js-native-api/test_reference/binding.gyp
index 518fd21c37c566..a9d81ef9d2c05d 100644
--- a/test/js-native-api/test_reference/binding.gyp
+++ b/test/js-native-api/test_reference/binding.gyp
@@ -3,9 +3,14 @@
{
"target_name": "test_reference",
"sources": [
- "../entry_point.c",
"test_reference.c"
]
+ },
+ {
+ "target_name": "test_finalizer",
+ "sources": [
+ "test_finalizer.c"
+ ]
}
]
}
diff --git a/test/js-native-api/test_reference/test_finalizer.c b/test/js-native-api/test_reference/test_finalizer.c
new file mode 100644
index 00000000000000..51492d9623f69c
--- /dev/null
+++ b/test/js-native-api/test_reference/test_finalizer.c
@@ -0,0 +1,71 @@
+#include
+#include
+#include
+#include "../common.h"
+#include "../entry_point.h"
+
+static int test_value = 1;
+static int finalize_count = 0;
+
+static void FinalizeExternalCallJs(napi_env env, void* data, void* hint) {
+ int* actual_value = data;
+ NODE_API_ASSERT_RETURN_VOID(
+ env,
+ actual_value == &test_value,
+ "The correct pointer was passed to the finalizer");
+
+ napi_ref finalizer_ref = (napi_ref)hint;
+ napi_value js_finalizer;
+ napi_value recv;
+ NODE_API_CALL_RETURN_VOID(
+ env, napi_get_reference_value(env, finalizer_ref, &js_finalizer));
+ NODE_API_CALL_RETURN_VOID(env, napi_get_global(env, &recv));
+ NODE_API_CALL_RETURN_VOID(
+ env, napi_call_function(env, recv, js_finalizer, 0, NULL, NULL));
+ NODE_API_CALL_RETURN_VOID(env, napi_delete_reference(env, finalizer_ref));
+}
+
+static napi_value CreateExternalWithJsFinalize(napi_env env,
+ napi_callback_info info) {
+ size_t argc = 1;
+ napi_value args[1];
+ NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+ NODE_API_ASSERT(env, argc == 1, "Wrong number of arguments");
+ napi_value finalizer = args[0];
+ napi_valuetype finalizer_valuetype;
+ NODE_API_CALL(env, napi_typeof(env, finalizer, &finalizer_valuetype));
+ NODE_API_ASSERT(env,
+ finalizer_valuetype == napi_function,
+ "Wrong type of first argument");
+ napi_ref finalizer_ref;
+ NODE_API_CALL(env, napi_create_reference(env, finalizer, 1, &finalizer_ref));
+
+ napi_value result;
+ NODE_API_CALL(env,
+ napi_create_external(env,
+ &test_value,
+ FinalizeExternalCallJs,
+ finalizer_ref, /* finalize_hint */
+ &result));
+
+ finalize_count = 0;
+ return result;
+}
+
+EXTERN_C_START
+napi_value Init(napi_env env, napi_value exports) {
+ napi_property_descriptor descriptors[] = {
+ DECLARE_NODE_API_PROPERTY("createExternalWithJsFinalize",
+ CreateExternalWithJsFinalize),
+ };
+
+ NODE_API_CALL(
+ env,
+ napi_define_properties(env,
+ exports,
+ sizeof(descriptors) / sizeof(*descriptors),
+ descriptors));
+
+ return exports;
+}
+EXTERN_C_END
diff --git a/test/js-native-api/test_reference/test_finalizer.js b/test/js-native-api/test_reference/test_finalizer.js
index b70582fd0342fe..a5270512dc87c1 100644
--- a/test/js-native-api/test_reference/test_finalizer.js
+++ b/test/js-native-api/test_reference/test_finalizer.js
@@ -2,7 +2,7 @@
// Flags: --expose-gc --force-node-api-uncaught-exceptions-policy
const common = require('../../common');
-const test_reference = require(`./build/${common.buildType}/test_reference`);
+const binding = require(`./build/${common.buildType}/test_finalizer`);
const assert = require('assert');
process.on('uncaughtException', common.mustCall((err) => {
@@ -11,7 +11,7 @@ process.on('uncaughtException', common.mustCall((err) => {
(async function() {
{
- test_reference.createExternalWithJsFinalize(
+ binding.createExternalWithJsFinalize(
common.mustCall(() => {
throw new Error('finalizer error');
}));
diff --git a/test/js-native-api/test_reference/test_reference.c b/test/js-native-api/test_reference/test_reference.c
index c17f27021b4215..058be07363588b 100644
--- a/test/js-native-api/test_reference/test_reference.c
+++ b/test/js-native-api/test_reference/test_reference.c
@@ -1,8 +1,9 @@
#define NAPI_VERSION 9
-#include
#include
#include
+#include
#include "../common.h"
+#include "../entry_point.h"
static int test_value = 1;
static int finalize_count = 0;
@@ -21,20 +22,6 @@ static void FinalizeExternal(napi_env env, void* data, void* hint) {
finalize_count++;
}
-static void FinalizeExternalCallJs(napi_env env, void* data, void* hint) {
- int *actual_value = data;
- NODE_API_ASSERT_RETURN_VOID(env, actual_value == &test_value,
- "The correct pointer was passed to the finalizer");
-
- napi_ref finalizer_ref = (napi_ref)hint;
- napi_value js_finalizer;
- napi_value recv;
- NODE_API_CALL_RETURN_VOID(env, napi_get_reference_value(env, finalizer_ref, &js_finalizer));
- NODE_API_CALL_RETURN_VOID(env, napi_get_global(env, &recv));
- NODE_API_CALL_RETURN_VOID(env, napi_call_function(env, recv, js_finalizer, 0, NULL, NULL));
- NODE_API_CALL_RETURN_VOID(env, napi_delete_reference(env, finalizer_ref));
-}
-
static napi_value CreateExternal(napi_env env, napi_callback_info info) {
int* data = &test_value;
@@ -51,40 +38,44 @@ static napi_value CreateExternal(napi_env env, napi_callback_info info) {
}
static napi_value CreateSymbol(napi_env env, napi_callback_info info) {
-
- size_t argc = 1;
- napi_value args[1];
-
- NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL,NULL));
- NODE_API_ASSERT(env, argc == 1, "Expect one argument only (symbol description)");
-
- napi_value result_symbol;
-
- NODE_API_CALL(env, napi_create_symbol(env, args[0], &result_symbol));
- return result_symbol;
+ size_t argc = 1;
+ napi_value args[1];
+
+ NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+ NODE_API_ASSERT(
+ env, argc == 1, "Expect one argument only (symbol description)");
+
+ napi_value result_symbol;
+
+ NODE_API_CALL(env, napi_create_symbol(env, args[0], &result_symbol));
+ return result_symbol;
}
static napi_value CreateSymbolFor(napi_env env, napi_callback_info info) {
-
- size_t argc = 1;
- napi_value args[1];
-
- char description[256];
- size_t description_length;
-
- NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL,NULL));
- NODE_API_ASSERT(env, argc == 1, "Expect one argument only (symbol description)");
-
- NODE_API_CALL(env, napi_get_value_string_utf8(env, args[0], description, sizeof(description), &description_length));
- NODE_API_ASSERT(env, description_length <= 255, "Cannot accommodate descriptions longer than 255 bytes");
-
- napi_value result_symbol;
-
- NODE_API_CALL(env, node_api_symbol_for(env,
- description,
- description_length,
- &result_symbol));
- return result_symbol;
+ size_t argc = 1;
+ napi_value args[1];
+
+ char description[256];
+ size_t description_length;
+
+ NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
+ NODE_API_ASSERT(
+ env, argc == 1, "Expect one argument only (symbol description)");
+
+ NODE_API_CALL(
+ env,
+ napi_get_value_string_utf8(
+ env, args[0], description, sizeof(description), &description_length));
+ NODE_API_ASSERT(env,
+ description_length <= 255,
+ "Cannot accommodate descriptions longer than 255 bytes");
+
+ napi_value result_symbol;
+
+ NODE_API_CALL(env,
+ node_api_symbol_for(
+ env, description, description_length, &result_symbol));
+ return result_symbol;
}
static napi_value CreateSymbolForEmptyString(napi_env env, napi_callback_info info) {
@@ -113,31 +104,6 @@ CreateExternalWithFinalize(napi_env env, napi_callback_info info) {
return result;
}
-static napi_value
-CreateExternalWithJsFinalize(napi_env env, napi_callback_info info) {
- size_t argc = 1;
- napi_value args[1];
- NODE_API_CALL(env, napi_get_cb_info(env, info, &argc, args, NULL, NULL));
- NODE_API_ASSERT(env, argc == 1, "Wrong number of arguments");
- napi_value finalizer = args[0];
- napi_valuetype finalizer_valuetype;
- NODE_API_CALL(env, napi_typeof(env, finalizer, &finalizer_valuetype));
- NODE_API_ASSERT(env, finalizer_valuetype == napi_function, "Wrong type of first argument");
- napi_ref finalizer_ref;
- NODE_API_CALL(env, napi_create_reference(env, finalizer, 1, &finalizer_ref));
-
- napi_value result;
- NODE_API_CALL(env,
- napi_create_external(env,
- &test_value,
- FinalizeExternalCallJs,
- finalizer_ref, /* finalize_hint */
- &result));
-
- finalize_count = 0;
- return result;
-}
-
static napi_value CheckExternal(napi_env env, napi_callback_info info) {
size_t argc = 1;
napi_value arg;
@@ -258,24 +224,24 @@ static napi_value ValidateDeleteBeforeFinalize(napi_env env, napi_callback_info
EXTERN_C_START
napi_value Init(napi_env env, napi_value exports) {
napi_property_descriptor descriptors[] = {
- DECLARE_NODE_API_GETTER("finalizeCount", GetFinalizeCount),
- DECLARE_NODE_API_PROPERTY("createExternal", CreateExternal),
- DECLARE_NODE_API_PROPERTY("createExternalWithFinalize",
- CreateExternalWithFinalize),
- DECLARE_NODE_API_PROPERTY("createExternalWithJsFinalize",
- CreateExternalWithJsFinalize),
- DECLARE_NODE_API_PROPERTY("checkExternal", CheckExternal),
- DECLARE_NODE_API_PROPERTY("createReference", CreateReference),
- DECLARE_NODE_API_PROPERTY("createSymbol", CreateSymbol),
- DECLARE_NODE_API_PROPERTY("createSymbolFor", CreateSymbolFor),
- DECLARE_NODE_API_PROPERTY("createSymbolForEmptyString", CreateSymbolForEmptyString),
- DECLARE_NODE_API_PROPERTY("createSymbolForIncorrectLength", CreateSymbolForIncorrectLength),
- DECLARE_NODE_API_PROPERTY("deleteReference", DeleteReference),
- DECLARE_NODE_API_PROPERTY("incrementRefcount", IncrementRefcount),
- DECLARE_NODE_API_PROPERTY("decrementRefcount", DecrementRefcount),
- DECLARE_NODE_API_GETTER("referenceValue", GetReferenceValue),
- DECLARE_NODE_API_PROPERTY("validateDeleteBeforeFinalize",
- ValidateDeleteBeforeFinalize),
+ DECLARE_NODE_API_GETTER("finalizeCount", GetFinalizeCount),
+ DECLARE_NODE_API_PROPERTY("createExternal", CreateExternal),
+ DECLARE_NODE_API_PROPERTY("createExternalWithFinalize",
+ CreateExternalWithFinalize),
+ DECLARE_NODE_API_PROPERTY("checkExternal", CheckExternal),
+ DECLARE_NODE_API_PROPERTY("createReference", CreateReference),
+ DECLARE_NODE_API_PROPERTY("createSymbol", CreateSymbol),
+ DECLARE_NODE_API_PROPERTY("createSymbolFor", CreateSymbolFor),
+ DECLARE_NODE_API_PROPERTY("createSymbolForEmptyString",
+ CreateSymbolForEmptyString),
+ DECLARE_NODE_API_PROPERTY("createSymbolForIncorrectLength",
+ CreateSymbolForIncorrectLength),
+ DECLARE_NODE_API_PROPERTY("deleteReference", DeleteReference),
+ DECLARE_NODE_API_PROPERTY("incrementRefcount", IncrementRefcount),
+ DECLARE_NODE_API_PROPERTY("decrementRefcount", DecrementRefcount),
+ DECLARE_NODE_API_GETTER("referenceValue", GetReferenceValue),
+ DECLARE_NODE_API_PROPERTY("validateDeleteBeforeFinalize",
+ ValidateDeleteBeforeFinalize),
};
NODE_API_CALL(env, napi_define_properties(
diff --git a/test/js-native-api/test_reference_double_free/binding.gyp b/test/js-native-api/test_reference_double_free/binding.gyp
index 864846765d0132..2d906dadae6126 100644
--- a/test/js-native-api/test_reference_double_free/binding.gyp
+++ b/test/js-native-api/test_reference_double_free/binding.gyp
@@ -3,7 +3,6 @@
{
"target_name": "test_reference_double_free",
"sources": [
- "../entry_point.c",
"test_reference_double_free.c"
]
}
diff --git a/test/js-native-api/test_reference_double_free/test_reference_double_free.c b/test/js-native-api/test_reference_double_free/test_reference_double_free.c
index f491d237fded3e..0e0f91caf98458 100644
--- a/test/js-native-api/test_reference_double_free/test_reference_double_free.c
+++ b/test/js-native-api/test_reference_double_free/test_reference_double_free.c
@@ -1,6 +1,7 @@
-#include
#include |