diff --git a/.github/ISSUE_TEMPLATE/docs-enhancement.yml b/.github/ISSUE_TEMPLATE/docs-enhancement.yml
new file mode 100644
index 0000000000000..ee8db1e8841d9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/docs-enhancement.yml
@@ -0,0 +1,37 @@
+name: 📄 CLI Docs Enhancement
+description: File a potential enhancement for the npm documentation
+title: "[DOCS]
"
+labels: [Documentation, Needs Triage]
+body:
+- type: checkboxes
+ attributes:
+ label: Is there an existing issue for this?
+ description: Please [search here](https://github.com/npm/cli/issues) to see if an issue already exists for your problem.
+ options:
+ - label: I have searched the existing issues
+ required: true
+- type: checkboxes
+ attributes:
+ label: This is a CLI Docs Enhancement, not another kind of Docs Enhancement.
+ description: These issue templates are only for CLI documentation enhancements. If you are looking to submit another kind of documentation enhancement, please submit it to the [documentation](https://github.com/npm/documentation) repo.
+ options:
+ - label: This is a CLI Docs Enhancement.
+ required: true
+- type: textarea
+ attributes:
+ label: Description of Problem
+ description: A clear & concise description of the current state of the docs.
+ validations:
+ required: true
+- type: textarea
+ attributes:
+ label: Potential Solution
+ description: A clear & concise description of the potential enhancement, if there is one.
+ validations:
+ required: false
+- type: input
+ attributes:
+ label: Docs URL
+ description: Please provide the URL of the page you'd like to see an enhancement to.
+ validations:
+ required: false
diff --git a/.github/ISSUE_TEMPLATE/docs-problem.yml b/.github/ISSUE_TEMPLATE/docs-problem.yml
new file mode 100644
index 0000000000000..6d509680979cd
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/docs-problem.yml
@@ -0,0 +1,37 @@
+name: 📄 CLI Docs Problem
+description: File a problem with the npm documentation
+title: "[DOCS] "
+labels: [Documentation, Needs Triage]
+body:
+- type: checkboxes
+ attributes:
+ label: Is there an existing issue for this?
+ description: Please [search here](https://github.com/npm/cli/issues) to see if an issue already exists for your problem.
+ options:
+ - label: I have searched the existing issues
+ required: true
+- type: checkboxes
+ attributes:
+ label: This is a CLI Docs Problem, not another kind of Docs Problem.
+ description: These issue templates are only for CLI documentation problems. If you are looking to submit another kind of documentation problem, please submit it to the [documentation](https://github.com/npm/documentation) repo.
+ options:
+ - label: This is a CLI Docs Problem.
+ required: true
+- type: textarea
+ attributes:
+ label: Description of Problem
+ description: A clear & concise description of what is wrong with the docs.
+ validations:
+ required: true
+- type: textarea
+ attributes:
+ label: Potential Solution
+ description: A clear & concise description of a potential solution or fix to the problem, if there is one.
+ validations:
+ required: false
+- type: input
+ attributes:
+ label: Affected URL
+ description: Please provide the affected URL.
+ validations:
+ required: false
diff --git a/AUTHORS b/AUTHORS
index 716cd48a07bae..6dbeba8723de8 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -815,3 +815,6 @@ yotamselementor <83912471+yotamselementor@users.noreply.github.com>
Felipe Plets
fncolon
Emin Buğra Saral
+Tierney Cyren
+Guillaume Grossetie
+linkgoron
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 270d524b4b205..6693be193bd23 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,76 @@
+## v8.4.0 (2022-01-27)
+
+### Features
+
+* [`fbe48a840`](https://github.com/npm/cli/commit/fbe48a84047e0c5de31bdaa84707f0f8fdcef71d)
+ [#4307](https://github.com/npm/cli/pull/4307)
+ feat(arborist): add named updates validation
+ ([@ruyadorno](https://github.com/ruyadorno))
+
+### Bug Fixes
+
+* [`1f853f8bf`](https://github.com/npm/cli/commit/1f853f8bf7cecd1222703dde676a4b664526141d)
+ [#4306](https://github.com/npm/cli/pull/4306)
+ fix(arborist): load actual tree on named updates
+ ([@ruyadorno](https://github.com/ruyadorno))
+* [`90c384ccc`](https://github.com/npm/cli/commit/90c384ccccac32c80c481a04c438cbcbea82539c)
+ [#4326](https://github.com/npm/cli/pull/4326)
+ fix(logout): require proper auth.js from npm-registry-fetch
+ ([@wraithgar](https://github.com/wraithgar))
+* [`fabcf431a`](https://github.com/npm/cli/commit/fabcf431a63ecf93b56ae5d9a05ad4e7ef280c2a)
+ [#4327](https://github.com/npm/cli/pull/4327)
+ fix(arborist): correctly load overrides on workspace edges, closes #4205
+ ([@nlf](https://github.com/nlf))
+* [`8c3b143ca`](https://github.com/npm/cli/commit/8c3b143ca20d0da56c0ce2764e288a4c203b9f93)
+ [#4258](https://github.com/npm/cli/pull/4258)
+ fix(arborist): shrinkwrap throws when trying to read a folder without permissions
+ ([@Linkgoron](https://github.com/Linkgoron))
+* [`b51b29c56`](https://github.com/npm/cli/commit/b51b29c563fa97aa4fbf38250d1f04e879a8d961)
+ [#4334](https://github.com/npm/cli/pull/4334)
+ fix(arborist): update save exact
+ ([@ruyadorno](https://github.com/ruyadorno))
+
+### Dependencies
+
+* [`8558527c7`](https://github.com/npm/cli/commit/8558527c7158b2c1c353f8ab9c31de2a66ab470e)
+ [#4333](https://github.com/npm/cli/pull/4333)
+ deps: `make-fetch-happen@10.0.0`
+ * compress option and accept/content encoding header edge cases
+ * strip cookie header on redirect across hostnames
+* [`1bfc507f2`](https://github.com/npm/cli/commit/1bfc507f2a5afa02f04d4dea2fc6d151d4fef3ac)
+ [#4326](https://github.com/npm/cli/pull/4326)
+ deps: `npm-registry-fetch@12.0.1`
+* [`52c9608e7`](https://github.com/npm/cli/commit/52c9608e7bb1cda396b2cef3fc1b48dbaa2b7de3)
+ [#4326](https://github.com/npm/cli/pull/4326)
+ deps: `pacote@12.0.3`
+* [`2bbeedfeb`](https://github.com/npm/cli/commit/2bbeedfebb3aea082d612deb5e4d9de9e550c529)
+ [#4326](https://github.com/npm/cli/pull/4326)
+ deps: `npm-profile@6.0.0`
+* [`9652d685b`](https://github.com/npm/cli/commit/9652d685b1e4bd21cec107a611c2e307387623d6)
+ chore(release): `@npmcli/arborist@4.3.0`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`0ee4927d2`](https://github.com/npm/cli/commit/0ee4927d2e8206dd24fa7eea5e1c10ea649ecc49)
+ chore(release): `libnpmaccess@5.0.1`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`6c0dc1ffb`](https://github.com/npm/cli/commit/6c0dc1ffb70858be1e9ca9afdb6950e39609a367)
+ chore(release): `libnpmexec@3.0.3`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`41b8f7b6f`](https://github.com/npm/cli/commit/41b8f7b6ff62f0e738865eb8e98df8650f5467bd)
+ chore(release): `libnpmorg@3.0.1`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`433e6aafb`](https://github.com/npm/cli/commit/433e6aafbbf56efcf71e991767a6f00afe4aba7c)
+ chore(release): `libnpmpublish@5.0.1`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`6654b6efe`](https://github.com/npm/cli/commit/6654b6efe02666bdb9864f4608e477ba132fd215)
+ chore(release): `libnpmsearch@4.0.1`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`3423a9804`](https://github.com/npm/cli/commit/3423a980436492b7f0ee9e002517387a801f4f4a)
+ chore(release): `libnpmteam@3.0.1`
+ ([@wraithgar](https://github.com/wraithgar))
+* [`fb03e485d`](https://github.com/npm/cli/commit/fb03e485d9b1f09eb1cbcce00ee8e3e5c012097f)
+ chore(release): `libnpmhook@7.0.1`
+ ([@wraithgar](https://github.com/wraithgar))
+
## v8.3.2 (2022-01-20)

### Bug Fixes
diff --git a/lib/commands/logout.js b/lib/commands/logout.js
index 4e6bab9859551..aea5e93652b0e 100644
--- a/lib/commands/logout.js
+++ b/lib/commands/logout.js
@@ -1,4 +1,4 @@
-const getAuth = require('npm-registry-fetch/auth.js')
+const getAuth = require('npm-registry-fetch/lib/auth.js')
const npmFetch = require('npm-registry-fetch')
const log = require('../utils/log-shim')
const BaseCommand = require('../base-command.js')
diff --git a/node_modules/@tootallnate/once/LICENSE b/node_modules/@tootallnate/once/LICENSE
new file mode 100644
index 0000000000000..c4c56a2a53b2f
--- /dev/null
+++ b/node_modules/@tootallnate/once/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Nathan Rajlich
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/@tootallnate/once/dist/index.d.ts
index a7efe943b2acb..93d02a9a348b5 100644
--- a/node_modules/@tootallnate/once/dist/index.d.ts
+++ b/node_modules/@tootallnate/once/dist/index.d.ts
@@ -1,14 +1,7 @@
/// <reference types="node" />
import { EventEmitter } from 'events';
-declare function once<T>(emitter: EventEmitter, name: string): once.CancelablePromise<T>;
-declare namespace once {
- interface CancelFunction {
- (): void;
- }
- interface CancelablePromise<T> extends Promise<T> {
- cancel: CancelFunction;
- }
- type CancellablePromise<T> = CancelablePromise<T>;
- function spread<T extends any[]>(emitter: EventEmitter, name: string): once.CancelablePromise<T>;
+import { EventNames, EventListenerParameters, AbortSignal } from './types';
+export interface OnceOptions {
+ signal?: AbortSignal;
}
-export = once;
+export default function once<Emitter extends EventEmitter, Event extends EventNames<Emitter>>(emitter: Emitter, name: Event, { signal }?: OnceOptions): Promise<EventListenerParameters<Emitter, Event>>;
diff --git a/node_modules/@tootallnate/once/dist/index.js b/node_modules/@tootallnate/once/dist/index.js
index bfd0dc88f758b..ca6385b1b82f8 100644
--- a/node_modules/@tootallnate/once/dist/index.js
+++ b/node_modules/@tootallnate/once/dist/index.js
@@ -1,39 +1,24 @@
"use strict";
-function noop() { }
-function once(emitter, name) {
- const o = once.spread(emitter, name);
- const r = o.then((args) => args[0]);
- r.cancel = o.cancel;
- return r;
-}
-(function (once) {
- function spread(emitter, name) {
- let c = null;
- const p = new Promise((resolve, reject) => {
- function cancel() {
- emitter.removeListener(name, onEvent);
- emitter.removeListener('error', onError);
- p.cancel = noop;
- }
- function onEvent(...args) {
- cancel();
- resolve(args);
- }
- function onError(err) {
- cancel();
- reject(err);
- }
- c = cancel;
- emitter.on(name, onEvent);
- emitter.on('error', onError);
- });
- if (!c) {
- throw new TypeError('Could not get `cancel()` function');
+Object.defineProperty(exports, "__esModule", { value: true });
+function once(emitter, name, { signal } = {}) {
+ return new Promise((resolve, reject) => {
+ function cleanup() {
+ signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup);
+ emitter.removeListener(name, onEvent);
+ emitter.removeListener('error', onError);
+ }
+ function onEvent(...args) {
+ cleanup();
+ resolve(args);
}
- p.cancel = c;
- return p;
- }
- once.spread = spread;
-})(once || (once = {}));
-module.exports = once;
+ function onError(err) {
+ cleanup();
+ reject(err);
+ }
+ signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup);
+ emitter.on(name, onEvent);
+ emitter.on('error', onError);
+ });
+}
+exports.default = once;
//# sourceMappingURL=index.js.map
\ No newline at end of file
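
The rewritten `once()` above drops the old `cancel()`-based API: the returned promise now resolves with the array of event arguments, and listeners can instead be detached through an `AbortSignal`-like object passed as `{ signal }`. A minimal usage sketch, assuming a CommonJS consumer and Node's global `AbortController` (the `.default` access is needed because the compiled file sets `__esModule`):

const { EventEmitter } = require('events')
const once = require('@tootallnate/once').default

async function demo () {
  const emitter = new EventEmitter()
  setTimeout(() => emitter.emit('ready', 42), 10)

  // the promise resolves with the array of event arguments
  const [value] = await once(emitter, 'ready')
  console.log(value) // 42

  // aborting only removes the listeners; the promise is left pending,
  // neither resolved nor rejected (see cleanup() above)
  const ac = new AbortController()
  once(emitter, 'never-fired', { signal: ac.signal }).then(null, () => {})
  ac.abort()
}

demo()
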
diff --git a/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/@tootallnate/once/dist/index.js.map
index 30d20491dbca8..61708ca07f1b0 100644
--- a/node_modules/@tootallnate/once/dist/index.js.map
+++ b/node_modules/@tootallnate/once/dist/index.js.map
@@ -1 +1 @@
-{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA,SAAS,IAAI,KAAI,CAAC;AAElB,SAAS,IAAI,CACZ,OAAqB,EACrB,IAAY;IAEZ,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM,CAAM,OAAO,EAAE,IAAI,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAA8B,CAAC;IACtE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;IACpB,OAAO,CAAC,CAAC;AACV,CAAC;AAED,WAAU,IAAI;IAWb,SAAgB,MAAM,CACrB,OAAqB,EACrB,IAAY;QAEZ,IAAI,CAAC,GAA+B,IAAI,CAAC;QACzC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAI,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC5C,SAAS,MAAM;gBACd,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;gBACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBACzC,CAAC,CAAC,MAAM,GAAG,IAAI,CAAC;YACjB,CAAC;YACD,SAAS,OAAO,CAAC,GAAG,IAAW;gBAC9B,MAAM,EAAE,CAAC;gBACT,OAAO,CAAC,IAAS,CAAC,CAAC;YACpB,CAAC;YACD,SAAS,OAAO,CAAC,GAAU;gBAC1B,MAAM,EAAE,CAAC;gBACT,MAAM,CAAC,GAAG,CAAC,CAAC;YACb,CAAC;YACD,CAAC,GAAG,MAAM,CAAC;YACX,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC9B,CAAC,CAA8B,CAAC;QAChC,IAAI,CAAC,CAAC,EAAE;YACP,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC,CAAC;SACzD;QACD,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;QACb,OAAO,CAAC,CAAC;IACV,CAAC;IA5Be,WAAM,SA4BrB,CAAA;AACF,CAAC,EAxCS,IAAI,KAAJ,IAAI,QAwCb;AAED,iBAAS,IAAI,CAAC"}
\ No newline at end of file
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;AAOA,SAAwB,IAAI,CAI3B,OAAgB,EAChB,IAAW,EACX,EAAE,MAAM,KAAkB,EAAE;IAE5B,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,SAAS,OAAO;YACf,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,mBAAmB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YAC9C,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC1C,CAAC;QACD,SAAS,OAAO,CAAC,GAAG,IAAW;YAC9B,OAAO,EAAE,CAAC;YACV,OAAO,CAAC,IAA+C,CAAC,CAAC;QAC1D,CAAC;QACD,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QACD,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,gBAAgB,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC3C,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;QAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;IAC9B,CAAC,CAAC,CAAC;AACJ,CAAC;AA1BD,uBA0BC"}
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts
new file mode 100644
index 0000000000000..eb2bbc6c6275e
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.d.ts
@@ -0,0 +1,231 @@
+export declare type OverloadedParameters<T> = T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+ (...args: infer A15): any;
+ (...args: infer A16): any;
+ (...args: infer A17): any;
+ (...args: infer A18): any;
+ (...args: infer A19): any;
+ (...args: infer A20): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 | A20 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+ (...args: infer A15): any;
+ (...args: infer A16): any;
+ (...args: infer A17): any;
+ (...args: infer A18): any;
+ (...args: infer A19): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 | A19 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+ (...args: infer A15): any;
+ (...args: infer A16): any;
+ (...args: infer A17): any;
+ (...args: infer A18): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 | A18 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+ (...args: infer A15): any;
+ (...args: infer A16): any;
+ (...args: infer A17): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 | A17 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+ (...args: infer A15): any;
+ (...args: infer A16): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 | A16 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+ (...args: infer A15): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 | A15 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+ (...args: infer A14): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 | A14 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+ (...args: infer A13): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 | A13 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+ (...args: infer A12): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 | A12 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+ (...args: infer A11): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 | A11 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+ (...args: infer A10): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 | A10 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+ (...args: infer A9): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 | A9 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+ (...args: infer A8): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 | A8 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+ (...args: infer A7): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 | A7 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+ (...args: infer A6): any;
+} ? A1 | A2 | A3 | A4 | A5 | A6 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+ (...args: infer A5): any;
+} ? A1 | A2 | A3 | A4 | A5 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+ (...args: infer A4): any;
+} ? A1 | A2 | A3 | A4 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+ (...args: infer A3): any;
+} ? A1 | A2 | A3 : T extends {
+ (...args: infer A1): any;
+ (...args: infer A2): any;
+} ? A1 | A2 : T extends {
+ (...args: infer A1): any;
+} ? A1 : any;
diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js b/node_modules/@tootallnate/once/dist/overloaded-parameters.js
new file mode 100644
index 0000000000000..207186d9e7cca
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.js
@@ -0,0 +1,3 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=overloaded-parameters.js.map
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map
new file mode 100644
index 0000000000000..863f146d625f6
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/overloaded-parameters.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"overloaded-parameters.js","sourceRoot":"","sources":["../src/overloaded-parameters.ts"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/types.d.ts b/node_modules/@tootallnate/once/dist/types.d.ts
new file mode 100644
index 0000000000000..58be8284ab8d3
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/types.d.ts
@@ -0,0 +1,17 @@
+/// <reference types="node" />
+import { EventEmitter } from 'events';
+import { OverloadedParameters } from './overloaded-parameters';
+export declare type FirstParameter<T> = T extends [infer R, ...any[]] ? R : never;
+export declare type EventListener<F, T extends string | symbol> = F extends [
+ T,
+ infer R,
+ ...any[]
+] ? R : never;
+export declare type EventParameters<Emitter extends EventEmitter> = OverloadedParameters<Emitter['on']>;
+export declare type EventNames<Emitter extends EventEmitter> = FirstParameter<EventParameters<Emitter>>;
+export declare type EventListenerParameters<Emitter extends EventEmitter, Event extends EventNames<Emitter>> = WithDefault<Parameters<EventListener<EventParameters<Emitter>, Event>>, unknown[]>;
+export declare type WithDefault<T, D> = [T] extends [never] ? D : T;
+export interface AbortSignal {
+ addEventListener: (name: string, listener: (...args: any[]) => any) => void;
+ removeEventListener: (name: string, listener: (...args: any[]) => any) => void;
+}
diff --git a/node_modules/@tootallnate/once/dist/types.js b/node_modules/@tootallnate/once/dist/types.js
new file mode 100644
index 0000000000000..11e638d1ee44a
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/types.js
@@ -0,0 +1,3 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/dist/types.js.map b/node_modules/@tootallnate/once/dist/types.js.map
new file mode 100644
index 0000000000000..c768b79002615
--- /dev/null
+++ b/node_modules/@tootallnate/once/dist/types.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"types.js","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":""}
\ No newline at end of file
diff --git a/node_modules/@tootallnate/once/package.json b/node_modules/@tootallnate/once/package.json
index 8343f9fad73ab..69ce947d9c310 100644
--- a/node_modules/@tootallnate/once/package.json
+++ b/node_modules/@tootallnate/once/package.json
@@ -1,6 +1,6 @@
{
"name": "@tootallnate/once",
- "version": "1.1.2",
+ "version": "2.0.0",
"description": "Creates a Promise that waits for a single event",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -10,8 +10,7 @@
"scripts": {
"prebuild": "rimraf dist",
"build": "tsc",
- "test": "mocha --reporter spec",
- "test-lint": "eslint src --ext .js,.ts",
+ "test": "jest",
"prepublishOnly": "npm run build"
},
"repository": {
@@ -25,21 +24,29 @@
"url": "https://github.com/TooTallNate/once/issues"
},
"devDependencies": {
+ "@types/jest": "^27.0.2",
"@types/node": "^12.12.11",
- "@typescript-eslint/eslint-plugin": "1.6.0",
- "@typescript-eslint/parser": "1.1.0",
- "eslint": "5.16.0",
- "eslint-config-airbnb": "17.1.0",
- "eslint-config-prettier": "4.1.0",
- "eslint-import-resolver-typescript": "1.1.1",
- "eslint-plugin-import": "2.16.0",
- "eslint-plugin-jsx-a11y": "6.2.1",
- "eslint-plugin-react": "7.12.4",
- "mocha": "^6.2.2",
+ "abort-controller": "^3.0.0",
+ "jest": "^27.2.1",
"rimraf": "^3.0.0",
- "typescript": "^3.7.3"
+ "ts-jest": "^27.0.5",
+ "typescript": "^4.4.3"
},
"engines": {
- "node": ">= 6"
+ "node": ">= 10"
+ },
+ "jest": {
+ "preset": "ts-jest",
+ "globals": {
+ "ts-jest": {
+ "diagnostics": false,
+ "isolatedModules": true
+ }
+ },
+ "verbose": false,
+ "testEnvironment": "node",
+ "testMatch": [
+ "/test/**/*.test.ts"
+ ]
}
}
diff --git a/node_modules/http-proxy-agent/dist/agent.js b/node_modules/http-proxy-agent/dist/agent.js
index 0252850516819..aca8280431488 100644
--- a/node_modules/http-proxy-agent/dist/agent.js
+++ b/node_modules/http-proxy-agent/dist/agent.js
@@ -18,7 +18,7 @@ const url_1 = __importDefault(require("url"));
const debug_1 = __importDefault(require("debug"));
const once_1 = __importDefault(require("@tootallnate/once"));
const agent_base_1 = require("agent-base");
-const debug = debug_1.default('http-proxy-agent');
+const debug = (0, debug_1.default)('http-proxy-agent');
function isHTTPS(protocol) {
return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;
}
@@ -86,7 +86,7 @@ class HttpProxyAgent extends agent_base_1.Agent {
if (parsed.port === '80') {
// if port is 80, then we can remove the port so that the
// ":80" portion is not on the produced URL
- delete parsed.port;
+ parsed.port = '';
}
// Change the `http.ClientRequest` instance's "path" field
// to the absolute path of the URL that will be requested.
@@ -136,7 +136,7 @@ class HttpProxyAgent extends agent_base_1.Agent {
// function throws instead of the `http` request machinery. This is
// important for i.e. `PacProxyAgent` which determines a failed proxy
// connection via the `callback()` function throwing.
- yield once_1.default(socket, 'connect');
+ yield (0, once_1.default)(socket, 'connect');
return socket;
});
}
diff --git a/node_modules/http-proxy-agent/dist/agent.js.map b/node_modules/http-proxy-agent/dist/agent.js.map
index 7a407620d8e50..bd3b56aa6dfdb 100644
--- a/node_modules/http-proxy-agent/dist/agent.js.map
+++ b/node_modules/http-proxy-agent/dist/agent.js.map
@@ -1 +1 @@
-{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,eAAW,CAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,OAAO,MAAM,CAAC,IAAI,CAAC;aACnB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GA
AG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,cAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"}
\ No newline at end of file
+{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,IAAA,eAAW,EAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,MAAM,CAAC,IAAI,GAAG,EAAE,CAAC;aACjB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAA
mB,EAAE,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,IAAA,cAAI,EAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"}
\ No newline at end of file
diff --git a/node_modules/http-proxy-agent/package.json b/node_modules/http-proxy-agent/package.json
index 870dd5d8af267..659d6e11e80e4 100644
--- a/node_modules/http-proxy-agent/package.json
+++ b/node_modules/http-proxy-agent/package.json
@@ -1,6 +1,6 @@
{
"name": "http-proxy-agent",
- "version": "4.0.1",
+ "version": "5.0.0",
"description": "An HTTP(s) proxy `http.Agent` implementation for HTTP",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -30,13 +30,13 @@
"url": "https://github.com/TooTallNate/node-http-proxy-agent/issues"
},
"dependencies": {
- "@tootallnate/once": "1",
+ "@tootallnate/once": "2",
"agent-base": "6",
"debug": "4"
},
"devDependencies": {
"@types/debug": "4",
- "@types/node": "^12.12.11",
+ "@types/node": "^12.19.2",
"@typescript-eslint/eslint-plugin": "1.6.0",
"@typescript-eslint/parser": "1.1.0",
"eslint": "5.16.0",
@@ -49,7 +49,7 @@
"mocha": "^6.2.2",
"proxy": "1",
"rimraf": "^3.0.0",
- "typescript": "^3.5.3"
+ "typescript": "^4.4.3"
},
"engines": {
"node": ">= 6"
diff --git a/node_modules/make-fetch-happen/LICENSE b/node_modules/make-fetch-happen/LICENSE
index 8d28acf866d93..1808eb2844231 100644
--- a/node_modules/make-fetch-happen/LICENSE
+++ b/node_modules/make-fetch-happen/LICENSE
@@ -1,6 +1,6 @@
ISC License

-Copyright (c) npm, Inc.
+Copyright 2017-2022 (c) npm, Inc.

Permission to use, copy, modify, and/or distribute this software for
any purpose with or without fee is hereby granted, provided that the
diff --git a/node_modules/make-fetch-happen/lib/agent.js b/node_modules/make-fetch-happen/lib/agent.js
index 3675dd8ae981a..095c35c5a2523 100644
--- a/node_modules/make-fetch-happen/lib/agent.js
+++ b/node_modules/make-fetch-happen/lib/agent.js
@@ -50,11 +50,13 @@ function getAgent (uri, opts) {
: isHttps ? require('https').globalAgent
: require('http').globalAgent
- if (isLambda && !pxuri)
+ if (isLambda && !pxuri) {
return lambdaAgent
+ }
- if (AGENT_CACHE.peek(key))
+ if (AGENT_CACHE.peek(key)) {
return AGENT_CACHE.get(key)
+ }
if (pxuri) {
const pxopts = isLambda ? {
@@ -86,16 +88,19 @@ function getAgent (uri, opts) {
function checkNoProxy (uri, opts) {
const host = new url.URL(uri).hostname.split('.').reverse()
let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
- if (typeof noproxy === 'string')
+ if (typeof noproxy === 'string') {
noproxy = noproxy.split(/\s*,\s*/g)
+ }
return noproxy && noproxy.some(no => {
const noParts = no.split('.').filter(x => x).reverse()
- if (!noParts.length)
+ if (!noParts.length) {
return false
+ }
for (let i = 0; i < noParts.length; i++) {
- if (host[i] !== noParts[i])
+ if (host[i] !== noParts[i]) {
return false
+ }
}
return true
})
@@ -104,8 +109,9 @@ function checkNoProxy (uri, opts) {
module.exports.getProcessEnv = getProcessEnv
function getProcessEnv (env) {
- if (!env)
+ if (!env) {
return
+ }
let value
@@ -114,8 +120,9 @@ function getProcessEnv (env) {
value = process.env[e] ||
process.env[e.toUpperCase()] ||
process.env[e.toLowerCase()]
- if (typeof value !== 'undefined')
+ if (typeof value !== 'undefined') {
break
+ }
}
}
@@ -141,8 +148,9 @@ function getProxyUri (uri, opts) {
protocol === 'http:' &&
getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
)
- if (!proxy)
+ if (!proxy) {
return null
+ }
const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy
@@ -177,13 +185,14 @@ function getProxy (proxyUrl, opts, isHttps) {
}
if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
- if (!isHttps)
+ if (!isHttps) {
return new HttpProxyAgent(popts)
- else
+ } else {
return new HttpsProxyAgent(popts)
- } else if (proxyUrl.protocol.startsWith('socks'))
+ }
+ } else if (proxyUrl.protocol.startsWith('socks')) {
return new SocksProxyAgent(popts)
- else {
+ } else {
throw Object.assign(
new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`),
{
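
For reference, the `checkNoProxy` logic kept above compares domain labels from the right-hand side, so a `no_proxy` entry matches a hostname and all of its subdomains but not unrelated hosts that merely contain it. A stand-alone sketch of that rule (`matchesNoProxy` is a hypothetical helper, not an export of this module):

function matchesNoProxy (hostname, noProxy) {
  const host = hostname.split('.').reverse()
  return noProxy.split(/\s*,\s*/g).some(no => {
    const noParts = no.split('.').filter(x => x).reverse()
    if (!noParts.length) {
      return false
    }
    for (let i = 0; i < noParts.length; i++) {
      if (host[i] !== noParts[i]) {
        return false
      }
    }
    return true
  })
}

console.log(matchesNoProxy('registry.npmjs.org', 'npmjs.org, localhost')) // true
console.log(matchesNoProxy('npmjs.org.example.net', 'npmjs.org'))         // false
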
diff --git a/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/make-fetch-happen/lib/cache/entry.js
index a2acea156ee6f..ae2ad8c7667f2 100644
--- a/node_modules/make-fetch-happen/lib/cache/entry.js
+++ b/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -52,23 +52,31 @@ const getMetadata = (request, response, options) => {
url: request.url,
reqHeaders: {},
resHeaders: {},
+
+ // options on which we must match the request and vary the response
+ options: {
+ compress: options.compress != null ? options.compress : request.compress,
+ },
}
// only save the status if it's not a 200 or 304
- if (response.status !== 200 && response.status !== 304)
+ if (response.status !== 200 && response.status !== 304) {
metadata.status = response.status
+ }
for (const name of KEEP_REQUEST_HEADERS) {
- if (request.headers.has(name))
+ if (request.headers.has(name)) {
metadata.reqHeaders[name] = request.headers.get(name)
+ }
}
// if the request's host header differs from the host in the url
// we need to keep it, otherwise it's just noise and we ignore it
const host = request.headers.get('host')
const parsedUrl = new url.URL(request.url)
- if (host && parsedUrl.host !== host)
+ if (host && parsedUrl.host !== host) {
metadata.reqHeaders.host = host
+ }
// if the response has a vary header, make sure
// we store the relevant request headers too
@@ -82,25 +90,17 @@ const getMetadata = (request, response, options) => {
// copy any other request headers that will vary the response
const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
for (const name of varyHeaders) {
- // explicitly ignore accept-encoding here
- if (name !== 'accept-encoding' && request.headers.has(name))
+ if (request.headers.has(name)) {
metadata.reqHeaders[name] = request.headers.get(name)
+ }
}
}
}
for (const name of KEEP_RESPONSE_HEADERS) {
- if (response.headers.has(name))
+ if (response.headers.has(name)) {
metadata.resHeaders[name] = response.headers.get(name)
- }
-
- // we only store accept-encoding and content-encoding if the user
- // has disabled automatic compression and decompression in minipass-fetch
- // since if it's enabled (the default) then the content will have
- // already been decompressed making the header a lie
- if (options.compress === false) {
- metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding')
- metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding')
+ }
}
return metadata
@@ -121,8 +121,9 @@ class CacheEntry {
// entry timestamp to determine staleness because cacache will update it
// when it verifies its data
this.entry.metadata.time = this.entry.metadata.time || this.entry.time
- } else
+ } else {
this.key = cacheKey(request)
+ }
this.options = options
@@ -143,9 +144,17 @@ class CacheEntry {
return entryA.policy.satisfies(entryB.request)
}, {
validateEntry: (entry) => {
+ // clean out entries with a buggy content-encoding value
+ if (entry.metadata &&
+ entry.metadata.resHeaders &&
+ entry.metadata.resHeaders['content-encoding'] === null) {
+ return false
+ }
+
// if an integrity is null, it needs to have a status specified
- if (entry.integrity === null)
+ if (entry.integrity === null) {
return !!(entry.metadata && entry.metadata.status)
+ }
return true
},
@@ -158,8 +167,9 @@ class CacheEntry {
// a cache mode of 'reload' means to behave as though we have no cache
// on the way to the network. return undefined to allow cacheFetch to
// create a brand new request no matter what.
- if (options.cache === 'reload')
+ if (options.cache === 'reload') {
return
+ }
// find the specific entry that satisfies the request
let match
@@ -194,6 +204,7 @@ class CacheEntry {
this[_request] = new Request(this.entry.metadata.url, {
method: 'GET',
headers: this.entry.metadata.reqHeaders,
+ ...this.entry.metadata.options,
})
}
@@ -235,7 +246,11 @@ class CacheEntry {
// if we got a status other than 200, 301, or 308,
// or the CachePolicy forbid storage, append the
// cache status header and return it untouched
- if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) {
+ if (
+ this.request.method !== 'GET' ||
+ ![200, 301, 308].includes(this.response.status) ||
+ !this.policy.storable()
+ ) {
this.response.headers.set('x-local-cache-status', 'skip')
return this.response
}
@@ -276,7 +291,8 @@ class CacheEntry {
abortStream = collector
collector.on('collect', (data) => {
// TODO if the cache write fails, log a warning but return the response anyway
- cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject)
+ cacache.put(this.options.cachePath, this.key, data, cacheOpts)
+ .then(cacheWriteResolve, cacheWriteReject)
})
body.unshift(collector)
body.unshift(this.response.body)
@@ -305,8 +321,9 @@ class CacheEntry {
// know to be invalid to the cache
abortStream.destroy(err)
})
- } else
+ } else {
await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+ }
// note: we do not set the x-local-cache-hash header because we do not know
// the hash value until after the write to the cache completes, which doesn't
@@ -347,25 +364,37 @@ class CacheEntry {
onResume = async () => {
removeOnResume()
try {
- const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
+ const content = await cacache.get.byDigest(
+ this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+ )
body.end(content)
} catch (err) {
- if (err.code === 'EINTEGRITY')
- await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
- if (err.code === 'ENOENT' || err.code === 'EINTEGRITY')
+ if (err.code === 'EINTEGRITY') {
+ await cacache.rm.content(
+ this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+ )
+ }
+ if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
await CacheEntry.invalidate(this.request, this.options)
+ }
body.emit('error', err)
}
}
} else {
onResume = () => {
- const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
+ const cacheStream = cacache.get.stream.byDigest(
+ this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+ )
cacheStream.on('error', async (err) => {
cacheStream.pause()
- if (err.code === 'EINTEGRITY')
- await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
- if (err.code === 'ENOENT' || err.code === 'EINTEGRITY')
+ if (err.code === 'EINTEGRITY') {
+ await cacache.rm.content(
+ this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+ )
+ }
+ if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
await CacheEntry.invalidate(this.request, this.options)
+ }
body.emit('error', err)
cacheStream.resume()
})
@@ -415,8 +444,9 @@ class CacheEntry {
// if the network fetch fails, return the stale
// cached response unless it has a cache-control
// of 'must-revalidate'
- if (!this.policy.mustRevalidate)
+ if (!this.policy.mustRevalidate) {
return this.respond(request.method, options, 'stale')
+ }
throw err
}
@@ -429,8 +459,12 @@ class CacheEntry {
// in the old cache entry to the new one, if the new metadata does not already
// include that header
for (const name of KEEP_RESPONSE_HEADERS) {
- if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name))
+ if (
+ !hasOwnProperty(metadata.resHeaders, name) &&
+ hasOwnProperty(this.entry.metadata.resHeaders, name)
+ ) {
metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+ }
}
try {
diff --git a/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/make-fetch-happen/lib/cache/errors.js
index 31e97c4b033c0..67a66573bebe6 100644
--- a/node_modules/make-fetch-happen/lib/cache/errors.js
+++ b/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -1,5 +1,6 @@
class NotCachedError extends Error {
constructor (url) {
+ /* eslint-disable-next-line max-len */
super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
this.code = 'ENOTCACHED'
}
diff --git a/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/make-fetch-happen/lib/cache/index.js
index cca93d9b4eb5d..17a6425592bcf 100644
--- a/node_modules/make-fetch-happen/lib/cache/index.js
+++ b/node_modules/make-fetch-happen/lib/cache/index.js
@@ -8,8 +8,9 @@ const cacheFetch = async (request, options) => {
const entry = await CacheEntry.find(request, options)
if (!entry) {
// no cached result, if the cache mode is 'only-if-cached' that's a failure
- if (options.cache === 'only-if-cached')
+ if (options.cache === 'only-if-cached') {
throw new NotCachedError(request.url)
+ }
// otherwise, we make a request, store it and return it
const response = await remote(request, options)
@@ -19,8 +20,9 @@ const cacheFetch = async (request, options) => {
// we have a cached response that satisfies this request, however if the cache
// mode is 'no-cache' then we send the revalidation request no matter what
- if (options.cache === 'no-cache')
+ if (options.cache === 'no-cache') {
return entry.revalidate(request, options)
+ }
// if the cached entry is not stale, or if the cache mode is 'force-cache' or
// 'only-if-cached' we can respond with the cached entry. set the status
@@ -28,16 +30,18 @@ const cacheFetch = async (request, options) => {
const _needsRevalidation = entry.policy.needsRevalidation(request)
if (options.cache === 'force-cache' ||
options.cache === 'only-if-cached' ||
- !_needsRevalidation)
+ !_needsRevalidation) {
return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+ }
// if we got here, the cache entry is stale so revalidate it
return entry.revalidate(request, options)
}
cacheFetch.invalidate = async (request, options) => {
- if (!options.cachePath)
+ if (!options.cachePath) {
return
+ }
return CacheEntry.invalidate(request, options)
}
diff --git a/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/make-fetch-happen/lib/cache/policy.js
index e0959f64ddf9d..ada3c8600dae9 100644
--- a/node_modules/make-fetch-happen/lib/cache/policy.js
+++ b/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -2,19 +2,6 @@ const CacheSemantics = require('http-cache-semantics')
const Negotiator = require('negotiator')
const ssri = require('ssri')
-// HACK: negotiator lazy loads several of its own modules
-// as a micro optimization. we need to be sure that they're
-// in memory as soon as possible at startup so that we do
-// not try to lazy load them after the directory has been
-// retired during a self update of the npm CLI, we do this
-// by calling all of the methods that trigger a lazy load
-// on a fake instance.
-const preloadNegotiator = new Negotiator({ headers: {} })
-preloadNegotiator.charsets()
-preloadNegotiator.encodings()
-preloadNegotiator.languages()
-preloadNegotiator.mediaTypes()
-
// options passed to http-cache-semantics constructor
const policyOptions = {
shared: false,
@@ -31,6 +18,7 @@ const requestObject = (request) => {
method: request.method,
url: request.url,
headers: {},
+ compress: request.compress,
}
request.headers.forEach((value, key) => {
@@ -74,16 +62,19 @@ class CachePolicy {
// static method to quickly determine if a request alone is storable
static storable (request, options) {
// no cachePath means no caching
- if (!options.cachePath)
+ if (!options.cachePath) {
return false
+ }
// user explicitly asked not to cache
- if (options.cache === 'no-store')
+ if (options.cache === 'no-store') {
return false
+ }
// we only cache GET and HEAD requests
- if (!['GET', 'HEAD'].includes(request.method))
+ if (!['GET', 'HEAD'].includes(request.method)) {
return false
+ }
// otherwise, let http-cache-semantics make the decision
// based on the request's headers
@@ -94,23 +85,32 @@ class CachePolicy {
// returns true if the policy satisfies the request
satisfies (request) {
const _req = requestObject(request)
- if (this.request.headers.host !== _req.headers.host)
+ if (this.request.headers.host !== _req.headers.host) {
+ return false
+ }
+
+ if (this.request.compress !== _req.compress) {
return false
+ }
const negotiatorA = new Negotiator(this.request)
const negotiatorB = new Negotiator(_req)
- if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes()))
+ if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
return false
+ }
- if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages()))
+ if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
return false
+ }
- if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings()))
+ if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
return false
+ }
- if (this.options.integrity)
+ if (this.options.integrity) {
return ssri.parse(this.options.integrity).match(this.entry.integrity)
+ }
return true
}
diff --git a/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/make-fetch-happen/lib/fetch.js
index dfded79295da1..233ba67e16550 100644
--- a/node_modules/make-fetch-happen/lib/fetch.js
+++ b/node_modules/make-fetch-happen/lib/fetch.js
@@ -13,20 +13,28 @@ const remote = require('./remote.js')
// in the fetch being rejected if the redirect is
// possible but invalid for some reason
const canFollowRedirect = (request, response, options) => {
- if (!isRedirect(response.status))
+ if (!isRedirect(response.status)) {
return false
+ }
- if (options.redirect === 'manual')
+ if (options.redirect === 'manual') {
return false
+ }
- if (options.redirect === 'error')
- throw new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect', { code: 'ENOREDIRECT' })
+ if (options.redirect === 'error') {
+ throw new FetchError(`redirect mode is set to error: ${request.url}`,
+ 'no-redirect', { code: 'ENOREDIRECT' })
+ }
- if (!response.headers.has('location'))
- throw new FetchError(`redirect location header missing for: ${request.url}`, 'no-location', { code: 'EINVALIDREDIRECT' })
+ if (!response.headers.has('location')) {
+ throw new FetchError(`redirect location header missing for: ${request.url}`,
+ 'no-location', { code: 'EINVALIDREDIRECT' })
+ }
- if (request.counter >= request.follow)
- throw new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect', { code: 'EMAXREDIRECT' })
+ if (request.counter >= request.follow) {
+ throw new FetchError(`maximum redirect reached at: ${request.url}`,
+ 'max-redirect', { code: 'EMAXREDIRECT' })
+ }
return true
}
@@ -39,26 +47,34 @@ const getRedirect = (request, response, options) => {
const location = response.headers.get('location')
const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
// Comment below is used under the following license:
- // Copyright (c) 2010-2012 Mikeal Rogers
- // Licensed under the Apache License, Version 2.0 (the "License");
- // you may not use this file except in compliance with the License.
- // You may obtain a copy of the License at
- // http://www.apache.org/licenses/LICENSE-2.0
- // Unless required by applicable law or agreed to in writing,
- // software distributed under the License is distributed on an "AS
- // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- // express or implied. See the License for the specific language
- // governing permissions and limitations under the License.
+ /**
+ * @license
+ * Copyright (c) 2010-2012 Mikeal Rogers
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an "AS
+ * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied. See the License for the specific language
+ * governing permissions and limitations under the License.
+ */
// Remove authorization if changing hostnames (but not if just
// changing ports or protocols). This matches the behavior of request:
// https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
- if (new url.URL(request.url).hostname !== redirectUrl.hostname)
+ if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
request.headers.delete('authorization')
+ request.headers.delete('cookie')
+ }
// for POST request with 301/302 response, or any request with 303 response,
// use GET when following redirect
- if (response.status === 303 || (request.method === 'POST' && [301, 302].includes(response.status))) {
+ if (
+ response.status === 303 ||
+ (request.method === 'POST' && [301, 302].includes(response.status))
+ ) {
_opts.method = 'GET'
_opts.body = null
request.headers.delete('content-length')
@@ -87,11 +103,13 @@ const fetch = async (request, options) => {
// request url
if (!['GET', 'HEAD'].includes(request.method) &&
response.status >= 200 &&
- response.status <= 399)
+ response.status <= 399) {
await cache.invalidate(request, options)
+ }
- if (!canFollowRedirect(request, response, options))
+ if (!canFollowRedirect(request, response, options)) {
return response
+ }
const redirect = getRedirect(request, response, options)
return fetch(redirect.request, redirect.options)
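
The hunk above makes the redirect handling explicit: when a redirect crosses hostnames the client now drops the cookie header alongside authorization, and a 303 (or a 301/302 on a POST) downgrades the follow-up request to GET. A minimal standalone sketch of the header-stripping rule, using a hypothetical stripSensitiveHeaders helper rather than the module's own code:

    // Sketch of the cross-host redirect policy described above (illustrative only).
    const { URL } = require('url')

    function stripSensitiveHeaders (requestUrl, redirectUrl, headers) {
      // Only strip credentials when the hostname changes; changing just the
      // port or protocol keeps them, matching the behavior of `request`.
      if (new URL(requestUrl).hostname !== new URL(redirectUrl).hostname) {
        delete headers.authorization
        delete headers.cookie
      }
      return headers
    }

    console.log(stripSensitiveHeaders(
      'https://registry.example.com/pkg',
      'https://cdn.example.net/pkg.tgz',
      { authorization: 'Bearer abc', cookie: 'a=1', accept: '*/*' }
    )) // → { accept: '*/*' }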
diff --git a/node_modules/make-fetch-happen/lib/options.js b/node_modules/make-fetch-happen/lib/options.js
index f6138e6e1d13a..a0c8664adf02a 100644
--- a/node_modules/make-fetch-happen/lib/options.js
+++ b/node_modules/make-fetch-happen/lib/options.js
@@ -7,36 +7,40 @@ const conditionalHeaders = [
]
const configureOptions = (opts) => {
- const {strictSSL, ...options} = { ...opts }
+ const { strictSSL, ...options } = { ...opts }
options.method = options.method ? options.method.toUpperCase() : 'GET'
options.rejectUnauthorized = strictSSL !== false
- if (!options.retry)
+ if (!options.retry) {
options.retry = { retries: 0 }
- else if (typeof options.retry === 'string') {
+ } else if (typeof options.retry === 'string') {
const retries = parseInt(options.retry, 10)
- if (isFinite(retries))
+ if (isFinite(retries)) {
options.retry = { retries }
- else
+ } else {
options.retry = { retries: 0 }
- } else if (typeof options.retry === 'number')
+ }
+ } else if (typeof options.retry === 'number') {
options.retry = { retries: options.retry }
- else
+ } else {
options.retry = { retries: 0, ...options.retry }
+ }
options.cache = options.cache || 'default'
if (options.cache === 'default') {
const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
return conditionalHeaders.includes(name.toLowerCase())
})
- if (hasConditionalHeader)
+ if (hasConditionalHeader) {
options.cache = 'no-store'
+ }
}
// cacheManager is deprecated, but if it's set and
// cachePath is not we should copy it to the new field
- if (options.cacheManager && !options.cachePath)
+ if (options.cacheManager && !options.cachePath) {
options.cachePath = options.cacheManager
+ }
return options
}
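
For readers skimming the hunk above: configureOptions accepts the retry option as a string, number, or object and normalizes it to an object with a retries count. A rough sketch of just that normalization, assuming the same input shapes (not the exported function itself):

    // Sketch of the retry normalization shown above (illustrative only).
    function normalizeRetry (retry) {
      if (!retry) {
        return { retries: 0 }
      }
      if (typeof retry === 'string') {
        const retries = parseInt(retry, 10)
        return isFinite(retries) ? { retries } : { retries: 0 }
      }
      if (typeof retry === 'number') {
        return { retries: retry }
      }
      return { retries: 0, ...retry }
    }

    console.log(normalizeRetry('3'))            // { retries: 3 }
    console.log(normalizeRetry(2))              // { retries: 2 }
    console.log(normalizeRetry({ factor: 10 })) // { retries: 0, factor: 10 }
    console.log(normalizeRetry(undefined))      // { retries: 0 }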
diff --git a/node_modules/make-fetch-happen/lib/remote.js b/node_modules/make-fetch-happen/lib/remote.js
index 7e4ed24edb530..a8b8d2a0198d4 100644
--- a/node_modules/make-fetch-happen/lib/remote.js
+++ b/node_modules/make-fetch-happen/lib/remote.js
@@ -29,11 +29,13 @@ const RETRY_TYPES = [
// and verifying response integrity
const remoteFetch = (request, options) => {
const agent = getAgent(request.url, options)
- if (!request.headers.has('connection'))
+ if (!request.headers.has('connection')) {
request.headers.set('connection', agent ? 'keep-alive' : 'close')
+ }
- if (!request.headers.has('user-agent'))
+ if (!request.headers.has('user-agent')) {
request.headers.set('user-agent', USER_AGENT)
+ }
// keep our own options since we're overriding the agent
// and the redirect mode
@@ -64,8 +66,9 @@ const remoteFetch = (request, options) => {
([408, 420, 429].includes(res.status) || res.status >= 500)
if (isRetriable) {
- if (typeof options.onRetry === 'function')
+ if (typeof options.onRetry === 'function') {
options.onRetry(res)
+ }
return retryHandler(res)
}
@@ -82,18 +85,21 @@ const remoteFetch = (request, options) => {
const isRetryError = err.retried instanceof fetch.Response ||
(RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
- if (req.method === 'POST' || isRetryError)
+ if (req.method === 'POST' || isRetryError) {
throw err
+ }
- if (typeof options.onRetry === 'function')
+ if (typeof options.onRetry === 'function') {
options.onRetry(err)
+ }
return retryHandler(err)
}
}, options.retry).catch((err) => {
// don't reject for http errors, just return them
- if (err.status >= 400 && err.type !== 'system')
+ if (err.status >= 400 && err.type !== 'system') {
return err
+ }
throw err
})
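
The remote.js changes above keep the existing retry semantics: responses with status 408, 420, 429, or 500 and above are retried, POST requests and non-retriable errors are re-thrown, and options.onRetry is invoked before each retry. Assuming make-fetch-happen's documented retry and onRetry options, a caller would use this roughly as follows:

    // Hypothetical usage of the retry/onRetry behavior described above.
    const fetch = require('make-fetch-happen')

    fetch('https://registry.npmjs.org/npm', {
      retry: { retries: 3, factor: 2 },
      onRetry: (resOrErr) => {
        // called with the failed Response or network error before each retry
        console.warn('retrying:', resOrErr.status || resOrErr.code)
      },
    }).then((res) => console.log(res.status))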
diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json
index dae7b37da4069..7b61953e56f57 100644
--- a/node_modules/make-fetch-happen/package.json
+++ b/node_modules/make-fetch-happen/package.json
@@ -1,20 +1,23 @@
{
"name": "make-fetch-happen",
- "version": "9.1.0",
+ "version": "10.0.0",
"description": "Opinionated, caching, retrying fetch client",
"main": "lib/index.js",
"files": [
+ "bin",
"lib"
],
"scripts": {
- "preversion": "npm t",
+ "preversion": "npm test",
"postversion": "npm publish",
- "prepublishOnly": "git push --follow-tags",
+ "prepublishOnly": "git push origin --follow-tags",
"test": "tap",
"posttest": "npm run lint",
"eslint": "eslint",
- "lint": "npm run eslint -- lib test",
- "lintfix": "npm run lint -- --fix"
+ "lint": "eslint '**/*.js'",
+ "lintfix": "npm run lint -- --fix",
+ "postlint": "npm-template-check",
+ "snap": "tap"
},
"repository": "https://github.com/npm/make-fetch-happen",
"keywords": [
@@ -26,17 +29,13 @@
"cache",
"subresource integrity"
],
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@zkat.tech",
- "twitter": "maybekatz"
- },
+ "author": "GitHub Inc.",
"license": "ISC",
"dependencies": {
"agentkeepalive": "^4.1.3",
"cacache": "^15.2.0",
"http-cache-semantics": "^4.1.0",
- "http-proxy-agent": "^4.0.1",
+ "http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
"is-lambda": "^1.0.1",
"lru-cache": "^6.0.0",
@@ -45,20 +44,17 @@
"minipass-fetch": "^1.3.2",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
- "negotiator": "^0.6.2",
+ "negotiator": "^0.6.3",
"promise-retry": "^2.0.1",
"socks-proxy-agent": "^6.0.0",
"ssri": "^8.0.0"
},
"devDependencies": {
- "eslint": "^7.26.0",
- "eslint-plugin-import": "^2.23.2",
- "eslint-plugin-node": "^11.1.0",
- "eslint-plugin-promise": "^5.1.0",
- "eslint-plugin-standard": "^5.0.0",
+ "@npmcli/template-oss": "^2.5.1",
+ "eslint": "^8.7.0",
"mkdirp": "^1.0.4",
"nock": "^13.0.11",
- "npmlog": "^5.0.0",
+ "npmlog": "^6.0.0",
"require-inject": "^1.4.2",
"rimraf": "^3.0.2",
"safe-buffer": "^5.2.1",
@@ -66,11 +62,14 @@
"tap": "^15.0.9"
},
"engines": {
- "node": ">= 10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
},
"tap": {
"color": 1,
"files": "test/*.js",
"check-coverage": true
+ },
+ "templateOSS": {
+ "version": "2.5.1"
}
}
diff --git a/node_modules/negotiator/HISTORY.md b/node_modules/negotiator/HISTORY.md
index 6d06c76aaa965..a9a544914c43b 100644
--- a/node_modules/negotiator/HISTORY.md
+++ b/node_modules/negotiator/HISTORY.md
@@ -1,3 +1,8 @@
+0.6.3 / 2022-01-22
+==================
+
+ * Revert "Lazy-load modules from main entry point"
+
0.6.2 / 2019-04-29
==================
diff --git a/node_modules/negotiator/index.js b/node_modules/negotiator/index.js
index 8d4f6a226cb0d..4788264b16c9f 100644
--- a/node_modules/negotiator/index.js
+++ b/node_modules/negotiator/index.js
@@ -8,12 +8,10 @@
'use strict';
-/**
- * Cached loaded submodules.
- * @private
- */
-
-var modules = Object.create(null);
+var preferredCharsets = require('./lib/charset')
+var preferredEncodings = require('./lib/encoding')
+var preferredLanguages = require('./lib/language')
+var preferredMediaTypes = require('./lib/mediaType')
/**
* Module exports.
@@ -43,7 +41,6 @@ Negotiator.prototype.charset = function charset(available) {
};
Negotiator.prototype.charsets = function charsets(available) {
- var preferredCharsets = loadModule('charset').preferredCharsets;
return preferredCharsets(this.request.headers['accept-charset'], available);
};
@@ -53,7 +50,6 @@ Negotiator.prototype.encoding = function encoding(available) {
};
Negotiator.prototype.encodings = function encodings(available) {
- var preferredEncodings = loadModule('encoding').preferredEncodings;
return preferredEncodings(this.request.headers['accept-encoding'], available);
};
@@ -63,7 +59,6 @@ Negotiator.prototype.language = function language(available) {
};
Negotiator.prototype.languages = function languages(available) {
- var preferredLanguages = loadModule('language').preferredLanguages;
return preferredLanguages(this.request.headers['accept-language'], available);
};
@@ -73,7 +68,6 @@ Negotiator.prototype.mediaType = function mediaType(available) {
};
Negotiator.prototype.mediaTypes = function mediaTypes(available) {
- var preferredMediaTypes = loadModule('mediaType').preferredMediaTypes;
return preferredMediaTypes(this.request.headers.accept, available);
};
@@ -86,39 +80,3 @@ Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
-
-/**
- * Load the given module.
- * @private
- */
-
-function loadModule(moduleName) {
- var module = modules[moduleName];
-
- if (module !== undefined) {
- return module;
- }
-
- // This uses a switch for static require analysis
- switch (moduleName) {
- case 'charset':
- module = require('./lib/charset');
- break;
- case 'encoding':
- module = require('./lib/encoding');
- break;
- case 'language':
- module = require('./lib/language');
- break;
- case 'mediaType':
- module = require('./lib/mediaType');
- break;
- default:
- throw new Error('Cannot find module \'' + moduleName + '\'');
- }
-
- // Store to prevent invoking require()
- modules[moduleName] = module;
-
- return module;
-}
diff --git a/node_modules/negotiator/lib/language.js b/node_modules/negotiator/lib/language.js
index 62f737f006021..a23167252719b 100644
--- a/node_modules/negotiator/lib/language.js
+++ b/node_modules/negotiator/lib/language.js
@@ -54,9 +54,9 @@ function parseLanguage(str, i) {
var match = simpleLanguageRegExp.exec(str);
if (!match) return null;
- var prefix = match[1],
- suffix = match[2],
- full = prefix;
+ var prefix = match[1]
+ var suffix = match[2]
+ var full = prefix
if (suffix) full += "-" + suffix;
diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json
index 0c7ff3c2e6468..297635f6d3417 100644
--- a/node_modules/negotiator/package.json
+++ b/node_modules/negotiator/package.json
@@ -1,7 +1,7 @@
{
"name": "negotiator",
"description": "HTTP content negotiation",
- "version": "0.6.2",
+ "version": "0.6.3",
"contributors": [
"Douglas Christopher Wilson ",
"Federico Romero ",
@@ -18,10 +18,10 @@
],
"repository": "jshttp/negotiator",
"devDependencies": {
- "eslint": "5.16.0",
- "eslint-plugin-markdown": "1.0.0",
- "mocha": "6.1.4",
- "nyc": "14.0.0"
+ "eslint": "7.32.0",
+ "eslint-plugin-markdown": "2.2.1",
+ "mocha": "9.1.3",
+ "nyc": "15.1.0"
},
"files": [
"lib/",
@@ -34,9 +34,9 @@
"node": ">= 0.6"
},
"scripts": {
- "lint": "eslint --plugin markdown --ext js,md .",
+ "lint": "eslint .",
"test": "mocha --reporter spec --check-leaks --bail test/",
- "test-cov": "nyc --reporter=html --reporter=text npm test",
- "test-travis": "nyc --reporter=text npm test"
+ "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+ "test-cov": "nyc --reporter=html --reporter=text npm test"
}
}
diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts
new file mode 100644
index 0000000000000..a7efe943b2acb
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.d.ts
@@ -0,0 +1,14 @@
+/// <reference types="node" />
+import { EventEmitter } from 'events';
+declare function once(emitter: EventEmitter, name: string): once.CancelablePromise;
+declare namespace once {
+ interface CancelFunction {
+ (): void;
+ }
+ interface CancelablePromise extends Promise {
+ cancel: CancelFunction;
+ }
+ type CancellablePromise = CancelablePromise;
+ function spread(emitter: EventEmitter, name: string): once.CancelablePromise;
+}
+export = once;
diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js
new file mode 100644
index 0000000000000..bfd0dc88f758b
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js
@@ -0,0 +1,39 @@
+"use strict";
+function noop() { }
+function once(emitter, name) {
+ const o = once.spread(emitter, name);
+ const r = o.then((args) => args[0]);
+ r.cancel = o.cancel;
+ return r;
+}
+(function (once) {
+ function spread(emitter, name) {
+ let c = null;
+ const p = new Promise((resolve, reject) => {
+ function cancel() {
+ emitter.removeListener(name, onEvent);
+ emitter.removeListener('error', onError);
+ p.cancel = noop;
+ }
+ function onEvent(...args) {
+ cancel();
+ resolve(args);
+ }
+ function onError(err) {
+ cancel();
+ reject(err);
+ }
+ c = cancel;
+ emitter.on(name, onEvent);
+ emitter.on('error', onError);
+ });
+ if (!c) {
+ throw new TypeError('Could not get `cancel()` function');
+ }
+ p.cancel = c;
+ return p;
+ }
+ once.spread = spread;
+})(once || (once = {}));
+module.exports = once;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
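
The compiled helper above resolves with the first argument of the first emission of the named event, rejects if 'error' fires first, and exposes cancel() to detach both listeners. Assuming the package's documented API, typical usage looks like:

    // Hypothetical usage of @tootallnate/once as vendored above.
    const { EventEmitter } = require('events')
    const once = require('@tootallnate/once')

    async function main () {
      const emitter = new EventEmitter()
      setTimeout(() => emitter.emit('ready', 42), 10)
      const value = await once(emitter, 'ready') // first argument of the event
      console.log(value) // 42
    }

    main()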
diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map
new file mode 100644
index 0000000000000..30d20491dbca8
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@tootallnate/once/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";AAEA,SAAS,IAAI,KAAI,CAAC;AAElB,SAAS,IAAI,CACZ,OAAqB,EACrB,IAAY;IAEZ,MAAM,CAAC,GAAG,IAAI,CAAC,MAAM,CAAM,OAAO,EAAE,IAAI,CAAC,CAAC;IAC1C,MAAM,CAAC,GAAG,CAAC,CAAC,IAAI,CAAC,CAAC,IAAS,EAAE,EAAE,CAAC,IAAI,CAAC,CAAC,CAAC,CAA8B,CAAC;IACtE,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;IACpB,OAAO,CAAC,CAAC;AACV,CAAC;AAED,WAAU,IAAI;IAWb,SAAgB,MAAM,CACrB,OAAqB,EACrB,IAAY;QAEZ,IAAI,CAAC,GAA+B,IAAI,CAAC;QACzC,MAAM,CAAC,GAAG,IAAI,OAAO,CAAI,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YAC5C,SAAS,MAAM;gBACd,OAAO,CAAC,cAAc,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;gBACtC,OAAO,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;gBACzC,CAAC,CAAC,MAAM,GAAG,IAAI,CAAC;YACjB,CAAC;YACD,SAAS,OAAO,CAAC,GAAG,IAAW;gBAC9B,MAAM,EAAE,CAAC;gBACT,OAAO,CAAC,IAAS,CAAC,CAAC;YACpB,CAAC;YACD,SAAS,OAAO,CAAC,GAAU;gBAC1B,MAAM,EAAE,CAAC;gBACT,MAAM,CAAC,GAAG,CAAC,CAAC;YACb,CAAC;YACD,CAAC,GAAG,MAAM,CAAC;YACX,OAAO,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,CAAC;YAC1B,OAAO,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC9B,CAAC,CAA8B,CAAC;QAChC,IAAI,CAAC,CAAC,EAAE;YACP,MAAM,IAAI,SAAS,CAAC,mCAAmC,CAAC,CAAC;SACzD;QACD,CAAC,CAAC,MAAM,GAAG,CAAC,CAAC;QACb,OAAO,CAAC,CAAC;IACV,CAAC;IA5Be,WAAM,SA4BrB,CAAA;AACF,CAAC,EAxCS,IAAI,KAAJ,IAAI,QAwCb;AAED,iBAAS,IAAI,CAAC"}
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/@tootallnate/once/package.json b/node_modules/node-gyp/node_modules/@tootallnate/once/package.json
new file mode 100644
index 0000000000000..8343f9fad73ab
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@tootallnate/once/package.json
@@ -0,0 +1,45 @@
+{
+ "name": "@tootallnate/once",
+ "version": "1.1.2",
+ "description": "Creates a Promise that waits for a single event",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prebuild": "rimraf dist",
+ "build": "tsc",
+ "test": "mocha --reporter spec",
+ "test-lint": "eslint src --ext .js,.ts",
+ "prepublishOnly": "npm run build"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/TooTallNate/once.git"
+ },
+ "keywords": [],
+ "author": "Nathan Rajlich (http://n8.io/)",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/TooTallNate/once/issues"
+ },
+ "devDependencies": {
+ "@types/node": "^12.12.11",
+ "@typescript-eslint/eslint-plugin": "1.6.0",
+ "@typescript-eslint/parser": "1.1.0",
+ "eslint": "5.16.0",
+ "eslint-config-airbnb": "17.1.0",
+ "eslint-config-prettier": "4.1.0",
+ "eslint-import-resolver-typescript": "1.1.1",
+ "eslint-plugin-import": "2.16.0",
+ "eslint-plugin-jsx-a11y": "6.2.1",
+ "eslint-plugin-react": "7.12.4",
+ "mocha": "^6.2.2",
+ "rimraf": "^3.0.0",
+ "typescript": "^3.7.3"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+}
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts
new file mode 100644
index 0000000000000..3f043f7f9f756
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.d.ts
@@ -0,0 +1,32 @@
+/// <reference types="node" />
+import net from 'net';
+import { Agent, ClientRequest, RequestOptions } from 'agent-base';
+import { HttpProxyAgentOptions } from '.';
+interface HttpProxyAgentClientRequest extends ClientRequest {
+ path: string;
+ output?: string[];
+ outputData?: {
+ data: string;
+ }[];
+ _header?: string | null;
+ _implicitHeader(): void;
+}
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ *
+ * @api public
+ */
+export default class HttpProxyAgent extends Agent {
+ private secureProxy;
+ private proxy;
+ constructor(_opts: string | HttpProxyAgentOptions);
+ /**
+ * Called when the node-core HTTP client library is creating a
+ * new HTTP request.
+ *
+ * @api protected
+ */
+ callback(req: HttpProxyAgentClientRequest, opts: RequestOptions): Promise;
+}
+export {};
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js
new file mode 100644
index 0000000000000..0252850516819
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js
@@ -0,0 +1,145 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const net_1 = __importDefault(require("net"));
+const tls_1 = __importDefault(require("tls"));
+const url_1 = __importDefault(require("url"));
+const debug_1 = __importDefault(require("debug"));
+const once_1 = __importDefault(require("@tootallnate/once"));
+const agent_base_1 = require("agent-base");
+const debug = debug_1.default('http-proxy-agent');
+function isHTTPS(protocol) {
+ return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false;
+}
+/**
+ * The `HttpProxyAgent` implements an HTTP Agent subclass that connects
+ * to the specified "HTTP proxy server" in order to proxy HTTP requests.
+ *
+ * @api public
+ */
+class HttpProxyAgent extends agent_base_1.Agent {
+ constructor(_opts) {
+ let opts;
+ if (typeof _opts === 'string') {
+ opts = url_1.default.parse(_opts);
+ }
+ else {
+ opts = _opts;
+ }
+ if (!opts) {
+ throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!');
+ }
+ debug('Creating new HttpProxyAgent instance: %o', opts);
+ super(opts);
+ const proxy = Object.assign({}, opts);
+ // If `true`, then connect to the proxy server over TLS.
+ // Defaults to `false`.
+ this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol);
+ // Prefer `hostname` over `host`, and set the `port` if needed.
+ proxy.host = proxy.hostname || proxy.host;
+ if (typeof proxy.port === 'string') {
+ proxy.port = parseInt(proxy.port, 10);
+ }
+ if (!proxy.port && proxy.host) {
+ proxy.port = this.secureProxy ? 443 : 80;
+ }
+ if (proxy.host && proxy.path) {
+ // If both a `host` and `path` are specified then it's most likely
+ // the result of a `url.parse()` call... we need to remove the
+ // `path` portion so that `net.connect()` doesn't attempt to open
+ // that as a Unix socket file.
+ delete proxy.path;
+ delete proxy.pathname;
+ }
+ this.proxy = proxy;
+ }
+ /**
+ * Called when the node-core HTTP client library is creating a
+ * new HTTP request.
+ *
+ * @api protected
+ */
+ callback(req, opts) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const { proxy, secureProxy } = this;
+ const parsed = url_1.default.parse(req.path);
+ if (!parsed.protocol) {
+ parsed.protocol = 'http:';
+ }
+ if (!parsed.hostname) {
+ parsed.hostname = opts.hostname || opts.host || null;
+ }
+ if (parsed.port == null && typeof opts.port) {
+ parsed.port = String(opts.port);
+ }
+ if (parsed.port === '80') {
+ // if port is 80, then we can remove the port so that the
+ // ":80" portion is not on the produced URL
+ delete parsed.port;
+ }
+ // Change the `http.ClientRequest` instance's "path" field
+ // to the absolute path of the URL that will be requested.
+ req.path = url_1.default.format(parsed);
+ // Inject the `Proxy-Authorization` header if necessary.
+ if (proxy.auth) {
+ req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`);
+ }
+ // Create a socket connection to the proxy server.
+ let socket;
+ if (secureProxy) {
+ debug('Creating `tls.Socket`: %o', proxy);
+ socket = tls_1.default.connect(proxy);
+ }
+ else {
+ debug('Creating `net.Socket`: %o', proxy);
+ socket = net_1.default.connect(proxy);
+ }
+ // At this point, the http ClientRequest's internal `_header` field
+ // might have already been set. If this is the case then we'll need
+ // to re-generate the string since we just changed the `req.path`.
+ if (req._header) {
+ let first;
+ let endOfHeaders;
+ debug('Regenerating stored HTTP header string for request');
+ req._header = null;
+ req._implicitHeader();
+ if (req.output && req.output.length > 0) {
+ // Node < 12
+ debug('Patching connection write() output buffer with updated header');
+ first = req.output[0];
+ endOfHeaders = first.indexOf('\r\n\r\n') + 4;
+ req.output[0] = req._header + first.substring(endOfHeaders);
+ debug('Output buffer: %o', req.output);
+ }
+ else if (req.outputData && req.outputData.length > 0) {
+ // Node >= 12
+ debug('Patching connection write() output buffer with updated header');
+ first = req.outputData[0].data;
+ endOfHeaders = first.indexOf('\r\n\r\n') + 4;
+ req.outputData[0].data =
+ req._header + first.substring(endOfHeaders);
+ debug('Output buffer: %o', req.outputData[0].data);
+ }
+ }
+ // Wait for the socket's `connect` event, so that this `callback()`
+ // function throws instead of the `http` request machinery. This is
+ // important for i.e. `PacProxyAgent` which determines a failed proxy
+ // connection via the `callback()` function throwing.
+ yield once_1.default(socket, 'connect');
+ return socket;
+ });
+ }
+}
+exports.default = HttpProxyAgent;
+//# sourceMappingURL=agent.js.map
\ No newline at end of file
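
The compiled agent above rewrites req.path to an absolute URL, injects a Proxy-Authorization header when the proxy URL carries credentials, and connects to the proxy over TLS when secureProxy is set. Assuming the package's documented entry point, usage looks roughly like:

    // Hypothetical usage of http-proxy-agent as vendored above.
    const http = require('http')
    const createHttpProxyAgent = require('http-proxy-agent')

    const agent = createHttpProxyAgent('http://user:pass@proxy.example.com:3128')
    http.get({ host: 'example.com', path: '/', agent }, (res) => {
      console.log('status:', res.statusCode)
    })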
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map
new file mode 100644
index 0000000000000..7a407620d8e50
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/agent.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,kDAAgC;AAChC,6DAAqC;AACrC,2CAAkE;AAGlE,MAAM,KAAK,GAAG,eAAW,CAAC,kBAAkB,CAAC,CAAC;AAY9C,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED;;;;;GAKG;AACH,MAAqB,cAAe,SAAQ,kBAAK;IAIhD,YAAY,KAAqC;QAChD,IAAI,IAA2B,CAAC;QAChC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,0CAA0C,EAAE,IAAI,CAAC,CAAC;QACxD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAA+B,IAAI,CAAE,CAAC;QAEjD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,GAAgC,EAChC,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YACpC,MAAM,MAAM,GAAG,aAAG,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC;YAEnC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,OAAO,CAAC;aAC1B;YAED,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE;gBACrB,MAAM,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC;aACrD;YAED,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,EAAE;gBAC5C,MAAM,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAChC;YAED,IAAI,MAAM,CAAC,IAAI,KAAK,IAAI,EAAE;gBACzB,yDAAyD;gBACzD,2CAA2C;gBAC3C,OAAO,MAAM,CAAC,IAAI,CAAC;aACnB;YAED,0DAA0D;YAC1D,0DAA0D;YAC1D,GAAG,CAAC,IAAI,GAAG,aAAG,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;YAE9B,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,GAAG,CAAC,SAAS,CACZ,qBAAqB,EACrB,SAAS,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CACrD,CAAC;aACF;YAED,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,kEAAkE;YAClE,IAAI,GAAG,CAAC,OAAO,EAAE;gBAChB,IAAI,KAAa,CAAC;gBAClB,IAAI,YAAoB,CAAC;gBACzB,KAAK,CAAC,oDAAoD,CAAC,CAAC;gBAC5D,GAAG,CAAC,OAAO,GAAG,IAAI,CAAC;gBACnB,GAAG,CAAC,eAAe,EAAE,CAAC;gBACtB,IAAI,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE;oBACxC,YAAY;oBACZ,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;oBACtB,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,MAAM,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC5D,KAAK,CAAC,mBAAmB,EAAE,GAAG,CAAC,MAAM,CAAC,CAAC;iBACvC;qBAAM,IAAI,GAAG,CAAC,UAAU,IAAI,GAAG,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE;oBACvD,aAAa;oBACb,KAAK,CACJ,+DAA+D,CAC/D,CAAC;oBACF,KAAK,GAAG,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;oBAC/B,YAAY,GAAG,KAAK,CAAC,OAAO,CAAC,UAAU,CAAC,GAAG,CAAC,CAAC;oBAC7C,GAAG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI;wBACrB,GAAG,CAAC,OAAO,GAAG,KAAK,CAAC,SAAS,CAAC,YAAY,CAAC,CAAC;oBAC7C,KAAK,CAAC,mBAAmB,EAAE,GA
AG,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;iBACnD;aACD;YAED,mEAAmE;YACnE,mEAAmE;YACnE,qEAAqE;YACrE,qDAAqD;YACrD,MAAM,cAAI,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;YAE9B,OAAO,MAAM,CAAC;QACf,CAAC;KAAA;CACD;AA1ID,iCA0IC"}
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts
new file mode 100644
index 0000000000000..24bdb52efcedc
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.d.ts
@@ -0,0 +1,21 @@
+/// <reference types="node" />
+import net from 'net';
+import tls from 'tls';
+import { Url } from 'url';
+import { AgentOptions } from 'agent-base';
+import _HttpProxyAgent from './agent';
+declare function createHttpProxyAgent(opts: string | createHttpProxyAgent.HttpProxyAgentOptions): _HttpProxyAgent;
+declare namespace createHttpProxyAgent {
+ interface BaseHttpProxyAgentOptions {
+ secureProxy?: boolean;
+ host?: string | null;
+ path?: string | null;
+ port?: string | number | null;
+ }
+ export interface HttpProxyAgentOptions extends AgentOptions, BaseHttpProxyAgentOptions, Partial> {
+ }
+ export type HttpProxyAgent = _HttpProxyAgent;
+ export const HttpProxyAgent: typeof _HttpProxyAgent;
+ export {};
+}
+export = createHttpProxyAgent;
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js
new file mode 100644
index 0000000000000..0a71180594605
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js
@@ -0,0 +1,14 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+const agent_1 = __importDefault(require("./agent"));
+function createHttpProxyAgent(opts) {
+ return new agent_1.default(opts);
+}
+(function (createHttpProxyAgent) {
+ createHttpProxyAgent.HttpProxyAgent = agent_1.default;
+ createHttpProxyAgent.prototype = agent_1.default.prototype;
+})(createHttpProxyAgent || (createHttpProxyAgent = {}));
+module.exports = createHttpProxyAgent;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map
new file mode 100644
index 0000000000000..e07dae5b08455
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/dist/index.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAIA,oDAAsC;AAEtC,SAAS,oBAAoB,CAC5B,IAAyD;IAEzD,OAAO,IAAI,eAAe,CAAC,IAAI,CAAC,CAAC;AAClC,CAAC;AAED,WAAU,oBAAoB;IAmBhB,mCAAc,GAAG,eAAe,CAAC;IAE9C,oBAAoB,CAAC,SAAS,GAAG,eAAe,CAAC,SAAS,CAAC;AAC5D,CAAC,EAtBS,oBAAoB,KAApB,oBAAoB,QAsB7B;AAED,iBAAS,oBAAoB,CAAC"}
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/http-proxy-agent/package.json b/node_modules/node-gyp/node_modules/http-proxy-agent/package.json
new file mode 100644
index 0000000000000..870dd5d8af267
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/http-proxy-agent/package.json
@@ -0,0 +1,57 @@
+{
+ "name": "http-proxy-agent",
+ "version": "4.0.1",
+ "description": "An HTTP(s) proxy `http.Agent` implementation for HTTP",
+ "main": "./dist/index.js",
+ "types": "./dist/index.d.ts",
+ "files": [
+ "dist"
+ ],
+ "scripts": {
+ "prebuild": "rimraf dist",
+ "build": "tsc",
+ "test": "mocha",
+ "test-lint": "eslint src --ext .js,.ts",
+ "prepublishOnly": "npm run build"
+ },
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/TooTallNate/node-http-proxy-agent.git"
+ },
+ "keywords": [
+ "http",
+ "proxy",
+ "endpoint",
+ "agent"
+ ],
+ "author": "Nathan Rajlich (http://n8.io/)",
+ "license": "MIT",
+ "bugs": {
+ "url": "https://github.com/TooTallNate/node-http-proxy-agent/issues"
+ },
+ "dependencies": {
+ "@tootallnate/once": "1",
+ "agent-base": "6",
+ "debug": "4"
+ },
+ "devDependencies": {
+ "@types/debug": "4",
+ "@types/node": "^12.12.11",
+ "@typescript-eslint/eslint-plugin": "1.6.0",
+ "@typescript-eslint/parser": "1.1.0",
+ "eslint": "5.16.0",
+ "eslint-config-airbnb": "17.1.0",
+ "eslint-config-prettier": "4.1.0",
+ "eslint-import-resolver-typescript": "1.1.1",
+ "eslint-plugin-import": "2.16.0",
+ "eslint-plugin-jsx-a11y": "6.2.1",
+ "eslint-plugin-react": "7.12.4",
+ "mocha": "^6.2.2",
+ "proxy": "1",
+ "rimraf": "^3.0.0",
+ "typescript": "^3.5.3"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js
new file mode 100644
index 0000000000000..3675dd8ae981a
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/agent.js
@@ -0,0 +1,194 @@
+'use strict'
+const LRU = require('lru-cache')
+const url = require('url')
+const isLambda = require('is-lambda')
+
+const AGENT_CACHE = new LRU({ max: 50 })
+const HttpAgent = require('agentkeepalive')
+const HttpsAgent = HttpAgent.HttpsAgent
+
+module.exports = getAgent
+
+const getAgentTimeout = timeout =>
+ typeof timeout !== 'number' || !timeout ? 0 : timeout + 1
+
+const getMaxSockets = maxSockets => maxSockets || 15
+
+function getAgent (uri, opts) {
+ const parsedUri = new url.URL(typeof uri === 'string' ? uri : uri.url)
+ const isHttps = parsedUri.protocol === 'https:'
+ const pxuri = getProxyUri(parsedUri.href, opts)
+
+ // If opts.timeout is zero, set the agentTimeout to zero as well. A timeout
+ // of zero disables the timeout behavior (OS limits still apply). Else, if
+ // opts.timeout is a non-zero value, set it to timeout + 1, to ensure that
+ // the node-fetch-npm timeout will always fire first, giving us more
+ // consistent errors.
+ const agentTimeout = getAgentTimeout(opts.timeout)
+ const agentMaxSockets = getMaxSockets(opts.maxSockets)
+
+ const key = [
+ `https:${isHttps}`,
+ pxuri
+ ? `proxy:${pxuri.protocol}//${pxuri.host}:${pxuri.port}`
+ : '>no-proxy<',
+ `local-address:${opts.localAddress || '>no-local-address<'}`,
+ `strict-ssl:${isHttps ? opts.rejectUnauthorized : '>no-strict-ssl<'}`,
+ `ca:${(isHttps && opts.ca) || '>no-ca<'}`,
+ `cert:${(isHttps && opts.cert) || '>no-cert<'}`,
+ `key:${(isHttps && opts.key) || '>no-key<'}`,
+ `timeout:${agentTimeout}`,
+ `maxSockets:${agentMaxSockets}`,
+ ].join(':')
+
+ if (opts.agent != null) { // `agent: false` has special behavior!
+ return opts.agent
+ }
+
+ // keep alive in AWS lambda makes no sense
+ const lambdaAgent = !isLambda ? null
+ : isHttps ? require('https').globalAgent
+ : require('http').globalAgent
+
+ if (isLambda && !pxuri)
+ return lambdaAgent
+
+ if (AGENT_CACHE.peek(key))
+ return AGENT_CACHE.get(key)
+
+ if (pxuri) {
+ const pxopts = isLambda ? {
+ ...opts,
+ agent: lambdaAgent,
+ } : opts
+ const proxy = getProxy(pxuri, pxopts, isHttps)
+ AGENT_CACHE.set(key, proxy)
+ return proxy
+ }
+
+ const agent = isHttps ? new HttpsAgent({
+ maxSockets: agentMaxSockets,
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ localAddress: opts.localAddress,
+ rejectUnauthorized: opts.rejectUnauthorized,
+ timeout: agentTimeout,
+ }) : new HttpAgent({
+ maxSockets: agentMaxSockets,
+ localAddress: opts.localAddress,
+ timeout: agentTimeout,
+ })
+ AGENT_CACHE.set(key, agent)
+ return agent
+}
+
+function checkNoProxy (uri, opts) {
+ const host = new url.URL(uri).hostname.split('.').reverse()
+ let noproxy = (opts.noProxy || getProcessEnv('no_proxy'))
+ if (typeof noproxy === 'string')
+ noproxy = noproxy.split(/\s*,\s*/g)
+
+ return noproxy && noproxy.some(no => {
+ const noParts = no.split('.').filter(x => x).reverse()
+ if (!noParts.length)
+ return false
+ for (let i = 0; i < noParts.length; i++) {
+ if (host[i] !== noParts[i])
+ return false
+ }
+ return true
+ })
+}
+
+module.exports.getProcessEnv = getProcessEnv
+
+function getProcessEnv (env) {
+ if (!env)
+ return
+
+ let value
+
+ if (Array.isArray(env)) {
+ for (const e of env) {
+ value = process.env[e] ||
+ process.env[e.toUpperCase()] ||
+ process.env[e.toLowerCase()]
+ if (typeof value !== 'undefined')
+ break
+ }
+ }
+
+ if (typeof env === 'string') {
+ value = process.env[env] ||
+ process.env[env.toUpperCase()] ||
+ process.env[env.toLowerCase()]
+ }
+
+ return value
+}
+
+module.exports.getProxyUri = getProxyUri
+function getProxyUri (uri, opts) {
+ const protocol = new url.URL(uri).protocol
+
+ const proxy = opts.proxy ||
+ (
+ protocol === 'https:' &&
+ getProcessEnv('https_proxy')
+ ) ||
+ (
+ protocol === 'http:' &&
+ getProcessEnv(['https_proxy', 'http_proxy', 'proxy'])
+ )
+ if (!proxy)
+ return null
+
+ const parsedProxy = (typeof proxy === 'string') ? new url.URL(proxy) : proxy
+
+ return !checkNoProxy(uri, opts) && parsedProxy
+}
+
+const getAuth = u =>
+ u.username && u.password ? decodeURIComponent(`${u.username}:${u.password}`)
+ : u.username ? decodeURIComponent(u.username)
+ : null
+
+const getPath = u => u.pathname + u.search + u.hash
+
+const HttpProxyAgent = require('http-proxy-agent')
+const HttpsProxyAgent = require('https-proxy-agent')
+const SocksProxyAgent = require('socks-proxy-agent')
+module.exports.getProxy = getProxy
+function getProxy (proxyUrl, opts, isHttps) {
+ const popts = {
+ host: proxyUrl.hostname,
+ port: proxyUrl.port,
+ protocol: proxyUrl.protocol,
+ path: getPath(proxyUrl),
+ auth: getAuth(proxyUrl),
+ ca: opts.ca,
+ cert: opts.cert,
+ key: opts.key,
+ timeout: getAgentTimeout(opts.timeout),
+ localAddress: opts.localAddress,
+ maxSockets: getMaxSockets(opts.maxSockets),
+ rejectUnauthorized: opts.rejectUnauthorized,
+ }
+
+ if (proxyUrl.protocol === 'http:' || proxyUrl.protocol === 'https:') {
+ if (!isHttps)
+ return new HttpProxyAgent(popts)
+ else
+ return new HttpsProxyAgent(popts)
+ } else if (proxyUrl.protocol.startsWith('socks'))
+ return new SocksProxyAgent(popts)
+ else {
+ throw Object.assign(
+ new Error(`unsupported proxy protocol: '${proxyUrl.protocol}'`),
+ {
+ url: proxyUrl.href,
+ }
+ )
+ }
+}
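
getAgent above caches keep-alive agents in an LRU keyed by the connection-relevant options, resolves the proxy from opts.proxy or the usual environment variables, and skips the proxy for hosts matched by no_proxy. A simplified sketch of the suffix matching checkNoProxy performs (hypothetical helper, not the exported function):

    // Simplified sketch of the no_proxy hostname matching used above.
    function matchesNoProxy (hostname, noProxy) {
      const host = hostname.split('.').reverse()
      const entries = typeof noProxy === 'string' ? noProxy.split(/\s*,\s*/g) : (noProxy || [])
      return entries.some((entry) => {
        const parts = entry.split('.').filter(Boolean).reverse()
        if (!parts.length) {
          return false
        }
        // every label of the entry must match the host, compared from the right
        return parts.every((part, i) => host[i] === part)
      })
    }

    console.log(matchesNoProxy('registry.npmjs.org', 'npmjs.org,localhost')) // true
    console.log(matchesNoProxy('example.com', 'npmjs.org,localhost'))        // false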
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
new file mode 100644
index 0000000000000..a2acea156ee6f
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -0,0 +1,460 @@
+const { Request, Response } = require('minipass-fetch')
+const Minipass = require('minipass')
+const MinipassCollect = require('minipass-collect')
+const MinipassFlush = require('minipass-flush')
+const MinipassPipeline = require('minipass-pipeline')
+const cacache = require('cacache')
+const url = require('url')
+
+const CachePolicy = require('./policy.js')
+const cacheKey = require('./key.js')
+const remote = require('../remote.js')
+
+const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
+
+// maximum amount of data we will buffer into memory
+// if we'll exceed this, we switch to streaming
+const MAX_MEM_SIZE = 5 * 1024 * 1024 // 5MB
+
+// allow list for request headers that will be written to the cache index
+// note: we will also store any request headers
+// that are named in a response's vary header
+const KEEP_REQUEST_HEADERS = [
+ 'accept-charset',
+ 'accept-encoding',
+ 'accept-language',
+ 'accept',
+ 'cache-control',
+]
+
+// allow list for response headers that will be written to the cache index
+// note: we must not store the real response's age header, or when we load
+// a cache policy based on the metadata it will think the cached response
+// is always stale
+const KEEP_RESPONSE_HEADERS = [
+ 'cache-control',
+ 'content-encoding',
+ 'content-language',
+ 'content-type',
+ 'date',
+ 'etag',
+ 'expires',
+ 'last-modified',
+ 'location',
+ 'pragma',
+ 'vary',
+]
+
+// return an object containing all metadata to be written to the index
+const getMetadata = (request, response, options) => {
+ const metadata = {
+ time: Date.now(),
+ url: request.url,
+ reqHeaders: {},
+ resHeaders: {},
+ }
+
+ // only save the status if it's not a 200 or 304
+ if (response.status !== 200 && response.status !== 304)
+ metadata.status = response.status
+
+ for (const name of KEEP_REQUEST_HEADERS) {
+ if (request.headers.has(name))
+ metadata.reqHeaders[name] = request.headers.get(name)
+ }
+
+ // if the request's host header differs from the host in the url
+ // we need to keep it, otherwise it's just noise and we ignore it
+ const host = request.headers.get('host')
+ const parsedUrl = new url.URL(request.url)
+ if (host && parsedUrl.host !== host)
+ metadata.reqHeaders.host = host
+
+ // if the response has a vary header, make sure
+ // we store the relevant request headers too
+ if (response.headers.has('vary')) {
+ const vary = response.headers.get('vary')
+ // a vary of "*" means every header causes a different response.
+ // in that scenario, we do not include any additional headers
+ // as the freshness check will always fail anyway and we don't
+ // want to bloat the cache indexes
+ if (vary !== '*') {
+ // copy any other request headers that will vary the response
+ const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
+ for (const name of varyHeaders) {
+ // explicitly ignore accept-encoding here
+ if (name !== 'accept-encoding' && request.headers.has(name))
+ metadata.reqHeaders[name] = request.headers.get(name)
+ }
+ }
+ }
+
+ for (const name of KEEP_RESPONSE_HEADERS) {
+ if (response.headers.has(name))
+ metadata.resHeaders[name] = response.headers.get(name)
+ }
+
+ // we only store accept-encoding and content-encoding if the user
+ // has disabled automatic compression and decompression in minipass-fetch
+ // since if it's enabled (the default) then the content will have
+ // already been decompressed making the header a lie
+ if (options.compress === false) {
+ metadata.reqHeaders['accept-encoding'] = request.headers.get('accept-encoding')
+ metadata.resHeaders['content-encoding'] = response.headers.get('content-encoding')
+ }
+
+ return metadata
+}
+
+// symbols used to hide objects that may be lazily evaluated in a getter
+const _request = Symbol('request')
+const _response = Symbol('response')
+const _policy = Symbol('policy')
+
+class CacheEntry {
+ constructor ({ entry, request, response, options }) {
+ if (entry) {
+ this.key = entry.key
+ this.entry = entry
+ // previous versions of this module didn't write an explicit timestamp in
+ // the metadata, so fall back to the entry's timestamp. we can't use the
+ // entry timestamp to determine staleness because cacache will update it
+ // when it verifies its data
+ this.entry.metadata.time = this.entry.metadata.time || this.entry.time
+ } else
+ this.key = cacheKey(request)
+
+ this.options = options
+
+ // these properties are behind getters that lazily evaluate
+ this[_request] = request
+ this[_response] = response
+ this[_policy] = null
+ }
+
+ // returns a CacheEntry instance that satisfies the given request
+ // or undefined if no existing entry satisfies
+ static async find (request, options) {
+ try {
+ // compacts the index and returns an array of unique entries
+ var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
+ const entryA = new CacheEntry({ entry: A, options })
+ const entryB = new CacheEntry({ entry: B, options })
+ return entryA.policy.satisfies(entryB.request)
+ }, {
+ validateEntry: (entry) => {
+ // if an integrity is null, it needs to have a status specified
+ if (entry.integrity === null)
+ return !!(entry.metadata && entry.metadata.status)
+
+ return true
+ },
+ })
+ } catch (err) {
+ // if the compact request fails, ignore the error and return
+ return
+ }
+
+ // a cache mode of 'reload' means to behave as though we have no cache
+ // on the way to the network. return undefined to allow cacheFetch to
+ // create a brand new request no matter what.
+ if (options.cache === 'reload')
+ return
+
+ // find the specific entry that satisfies the request
+ let match
+ for (const entry of matches) {
+ const _entry = new CacheEntry({
+ entry,
+ options,
+ })
+
+ if (_entry.policy.satisfies(request)) {
+ match = _entry
+ break
+ }
+ }
+
+ return match
+ }
+
+ // if the user made a PUT/POST/PATCH then we invalidate our
+ // cache for the same url by deleting the index entirely
+ static async invalidate (request, options) {
+ const key = cacheKey(request)
+ try {
+ await cacache.rm.entry(options.cachePath, key, { removeFully: true })
+ } catch (err) {
+ // ignore errors
+ }
+ }
+
+ get request () {
+ if (!this[_request]) {
+ this[_request] = new Request(this.entry.metadata.url, {
+ method: 'GET',
+ headers: this.entry.metadata.reqHeaders,
+ })
+ }
+
+ return this[_request]
+ }
+
+ get response () {
+ if (!this[_response]) {
+ this[_response] = new Response(null, {
+ url: this.entry.metadata.url,
+ counter: this.options.counter,
+ status: this.entry.metadata.status || 200,
+ headers: {
+ ...this.entry.metadata.resHeaders,
+ 'content-length': this.entry.size,
+ },
+ })
+ }
+
+ return this[_response]
+ }
+
+ get policy () {
+ if (!this[_policy]) {
+ this[_policy] = new CachePolicy({
+ entry: this.entry,
+ request: this.request,
+ response: this.response,
+ options: this.options,
+ })
+ }
+
+ return this[_policy]
+ }
+
+ // wraps the response in a pipeline that stores the data
+ // in the cache while the user consumes it
+ async store (status) {
+ // if we got a status other than 200, 301, or 308,
+ // or the CachePolicy forbid storage, append the
+ // cache status header and return it untouched
+ if (this.request.method !== 'GET' || ![200, 301, 308].includes(this.response.status) || !this.policy.storable()) {
+ this.response.headers.set('x-local-cache-status', 'skip')
+ return this.response
+ }
+
+ const size = this.response.headers.get('content-length')
+ const fitsInMemory = !!size && Number(size) < MAX_MEM_SIZE
+ const shouldBuffer = this.options.memoize !== false && fitsInMemory
+ const cacheOpts = {
+ algorithms: this.options.algorithms,
+ metadata: getMetadata(this.request, this.response, this.options),
+ size,
+ memoize: fitsInMemory && this.options.memoize,
+ }
+
+ let body = null
+ // we only set a body if the status is a 200, redirects are
+ // stored as metadata only
+ if (this.response.status === 200) {
+ let cacheWriteResolve, cacheWriteReject
+ const cacheWritePromise = new Promise((resolve, reject) => {
+ cacheWriteResolve = resolve
+ cacheWriteReject = reject
+ })
+
+ body = new MinipassPipeline(new MinipassFlush({
+ flush () {
+ return cacheWritePromise
+ },
+ }))
+
+ let abortStream, onResume
+ if (shouldBuffer) {
+ // if the result fits in memory, use a collect stream to gather
+ // the response and write it to cacache while also passing it through
+ // to the user
+ onResume = () => {
+ const collector = new MinipassCollect.PassThrough()
+ abortStream = collector
+ collector.on('collect', (data) => {
+ // TODO if the cache write fails, log a warning but return the response anyway
+ cacache.put(this.options.cachePath, this.key, data, cacheOpts).then(cacheWriteResolve, cacheWriteReject)
+ })
+ body.unshift(collector)
+ body.unshift(this.response.body)
+ }
+ } else {
+ // if it does not fit in memory, create a tee stream and use
+ // that to pipe to both the cache and the user simultaneously
+ onResume = () => {
+ const tee = new Minipass()
+ const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
+ abortStream = cacheStream
+ tee.pipe(cacheStream)
+ // TODO if the cache write fails, log a warning but return the response anyway
+ cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+ body.unshift(tee)
+ body.unshift(this.response.body)
+ }
+ }
+
+ body.once('resume', onResume)
+ body.once('end', () => body.removeListener('resume', onResume))
+ this.response.body.on('error', (err) => {
+ // the abortStream will either be a MinipassCollect if we buffer
+ // or a cacache write stream, either way be sure to listen for
+ // errors from the actual response and avoid writing data that we
+ // know to be invalid to the cache
+ abortStream.destroy(err)
+ })
+ } else
+ await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+
+ // note: we do not set the x-local-cache-hash header because we do not know
+ // the hash value until after the write to the cache completes, which doesn't
+ // happen until after the response has been sent and it's too late to write
+ // the header anyway
+ this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+ this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+ this.response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream')
+ this.response.headers.set('x-local-cache-status', status)
+ this.response.headers.set('x-local-cache-time', new Date().toISOString())
+ const newResponse = new Response(body, {
+ url: this.response.url,
+ status: this.response.status,
+ headers: this.response.headers,
+ counter: this.options.counter,
+ })
+ return newResponse
+ }
+
+ // use the cached data to create a response and return it
+ async respond (method, options, status) {
+ let response
+ const size = Number(this.response.headers.get('content-length'))
+ const fitsInMemory = !!size && size < MAX_MEM_SIZE
+ const shouldBuffer = this.options.memoize !== false && fitsInMemory
+ if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
+ // if the request is a HEAD, or the response is a redirect,
+ // then the metadata in the entry already includes everything
+ // we need to build a response
+ response = this.response
+ } else {
+ // we're responding with a full cached response, so create a body
+ // that reads from cacache and attach it to a new Response
+ const body = new Minipass()
+ const removeOnResume = () => body.removeListener('resume', onResume)
+ let onResume
+ if (shouldBuffer) {
+ onResume = async () => {
+ removeOnResume()
+ try {
+ const content = await cacache.get.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
+ body.end(content)
+ } catch (err) {
+ if (err.code === 'EINTEGRITY')
+ await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
+ if (err.code === 'ENOENT' || err.code === 'EINTEGRITY')
+ await CacheEntry.invalidate(this.request, this.options)
+ body.emit('error', err)
+ }
+ }
+ } else {
+ onResume = () => {
+ const cacheStream = cacache.get.stream.byDigest(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
+ cacheStream.on('error', async (err) => {
+ cacheStream.pause()
+ if (err.code === 'EINTEGRITY')
+ await cacache.rm.content(this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize })
+ if (err.code === 'ENOENT' || err.code === 'EINTEGRITY')
+ await CacheEntry.invalidate(this.request, this.options)
+ body.emit('error', err)
+ cacheStream.resume()
+ })
+ cacheStream.pipe(body)
+ }
+ }
+
+ body.once('resume', onResume)
+ body.once('end', removeOnResume)
+ response = new Response(body, {
+ url: this.entry.metadata.url,
+ counter: options.counter,
+ status: 200,
+ headers: {
+ ...this.policy.responseHeaders(),
+ },
+ })
+ }
+
+ response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+ response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
+ response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+ response.headers.set('x-local-cache-mode', shouldBuffer ? 'buffer' : 'stream')
+ response.headers.set('x-local-cache-status', status)
+ response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
+ return response
+ }
+
+ // use the provided request along with this cache entry to
+ // revalidate the stored response. returns a response, either
+ // from the cache or from the update
+ async revalidate (request, options) {
+ const revalidateRequest = new Request(request, {
+ headers: this.policy.revalidationHeaders(request),
+ })
+
+ try {
+ // NOTE: be sure to remove the headers property from the
+ // user supplied options, since we have already defined
+ // them on the new request object. if they're still in the
+ // options then those will overwrite the ones from the policy
+ var response = await remote(revalidateRequest, {
+ ...options,
+ headers: undefined,
+ })
+ } catch (err) {
+ // if the network fetch fails, return the stale
+ // cached response unless it has a cache-control
+ // of 'must-revalidate'
+ if (!this.policy.mustRevalidate)
+ return this.respond(request.method, options, 'stale')
+
+ throw err
+ }
+
+ if (this.policy.revalidated(revalidateRequest, response)) {
+ // we got a 304, write a new index to the cache and respond from cache
+ const metadata = getMetadata(request, response, options)
+ // 304 responses do not include headers that are specific to the response data
+ // since they do not include a body, so we copy values for headers that were
+ // in the old cache entry to the new one, if the new metadata does not already
+ // include that header
+ for (const name of KEEP_RESPONSE_HEADERS) {
+ if (!hasOwnProperty(metadata.resHeaders, name) && hasOwnProperty(this.entry.metadata.resHeaders, name))
+ metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+ }
+
+ try {
+ await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
+ size: this.entry.size,
+ metadata,
+ })
+ } catch (err) {
+ // if updating the cache index fails, we ignore it and
+ // respond anyway
+ }
+ return this.respond(request.method, options, 'revalidated')
+ }
+
+ // if we got a modified response, create a new entry based on it
+ const newEntry = new CacheEntry({
+ request,
+ response,
+ options,
+ })
+
+ // respond with the new entry while writing it to the cache
+ return newEntry.store('updated')
+ }
+}
+
+module.exports = CacheEntry
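
CacheEntry above buffers cache writes only when the content-length is known and under the 5MB MAX_MEM_SIZE, and it records extra request headers whenever the response names them in Vary (skipping accept-encoding, and storing nothing extra for Vary: *). A small illustration of that Vary-driven selection, using a hypothetical helper over plain header objects:

    // Illustration of the Vary-based request-header capture described above.
    function varyHeadersToKeep (varyValue, requestHeaders) {
      if (!varyValue || varyValue === '*') {
        return {} // '*' means every header varies, so freshness checks always fail
      }
      const kept = {}
      for (const name of varyValue.trim().toLowerCase().split(/\s*,\s*/)) {
        if (name === 'accept-encoding') {
          continue // compression is handled by minipass-fetch, so it is ignored here
        }
        if (name in requestHeaders) {
          kept[name] = requestHeaders[name]
        }
      }
      return kept
    }

    console.log(varyHeadersToKeep('Accept, Accept-Language', {
      accept: 'application/json',
      'accept-language': 'en-US',
      'user-agent': 'npm',
    })) // → { accept: 'application/json', 'accept-language': 'en-US' }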
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
new file mode 100644
index 0000000000000..31e97c4b033c0
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -0,0 +1,10 @@
+class NotCachedError extends Error {
+ constructor (url) {
+ super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
+ this.code = 'ENOTCACHED'
+ }
+}
+
+module.exports = {
+ NotCachedError,
+}
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
new file mode 100644
index 0000000000000..cca93d9b4eb5d
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
@@ -0,0 +1,45 @@
+const { NotCachedError } = require('./errors.js')
+const CacheEntry = require('./entry.js')
+const remote = require('../remote.js')
+
+// do whatever is necessary to get a Response and return it
+const cacheFetch = async (request, options) => {
+ // try to find a cached entry that satisfies this request
+ const entry = await CacheEntry.find(request, options)
+ if (!entry) {
+ // no cached result, if the cache mode is 'only-if-cached' that's a failure
+ if (options.cache === 'only-if-cached')
+ throw new NotCachedError(request.url)
+
+ // otherwise, we make a request, store it and return it
+ const response = await remote(request, options)
+ const entry = new CacheEntry({ request, response, options })
+ return entry.store('miss')
+ }
+
+ // we have a cached response that satisfies this request, however if the cache
+ // mode is 'no-cache' then we send the revalidation request no matter what
+ if (options.cache === 'no-cache')
+ return entry.revalidate(request, options)
+
+ // if the cached entry is not stale, or if the cache mode is 'force-cache' or
+ // 'only-if-cached' we can respond with the cached entry. set the status
+ // based on the result of needsRevalidation and respond
+ const _needsRevalidation = entry.policy.needsRevalidation(request)
+ if (options.cache === 'force-cache' ||
+ options.cache === 'only-if-cached' ||
+ !_needsRevalidation)
+ return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+
+ // if we got here, the cache entry is stale so revalidate it
+ return entry.revalidate(request, options)
+}
+
+cacheFetch.invalidate = async (request, options) => {
+ if (!options.cachePath)
+ return
+
+ return CacheEntry.invalidate(request, options)
+}
+
+module.exports = cacheFetch
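
cacheFetch above implements the standard fetch cache modes: 'only-if-cached' rejects with ENOTCACHED on a miss, 'no-cache' always revalidates, 'force-cache' and 'only-if-cached' serve even stale entries, and the default revalidates only when the policy reports the entry stale. Assuming make-fetch-happen's public options, selecting a mode looks like:

    // Hypothetical usage of the cache modes handled by cacheFetch above.
    const fetch = require('make-fetch-happen').defaults({
      cachePath: '/tmp/my-fetch-cache', // illustrative path
    })

    // default: serve fresh entries from cache, revalidate stale ones
    fetch('https://registry.npmjs.org/npm')

    // never hit the network; rejects with code ENOTCACHED on a cache miss
    fetch('https://registry.npmjs.org/npm', { cache: 'only-if-cached' })

    // always revalidate with the origin, even if the cached entry is fresh
    fetch('https://registry.npmjs.org/npm', { cache: 'no-cache' })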
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
new file mode 100644
index 0000000000000..f7684d562b7fa
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
@@ -0,0 +1,17 @@
+const { URL, format } = require('url')
+
+// options passed to url.format() when generating a key
+const formatOptions = {
+ auth: false,
+ fragment: false,
+ search: true,
+ unicode: false,
+}
+
+// returns a string to be used as the cache key for the Request
+const cacheKey = (request) => {
+ const parsed = new URL(request.url)
+ return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
+}
+
+module.exports = cacheKey
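
cacheKey above formats the request URL without credentials or fragment while keeping the query string, so distinct queries get distinct cache entries. For example (restating the code above outside the module):

    // Illustration of the cache-key construction above.
    const { URL, format } = require('url')

    const formatOptions = { auth: false, fragment: false, search: true, unicode: false }
    const cacheKey = (request) =>
      `make-fetch-happen:request-cache:${format(new URL(request.url), formatOptions)}`

    console.log(cacheKey({ url: 'https://user:pass@registry.npmjs.org/npm?write=true#top' }))
    // → make-fetch-happen:request-cache:https://registry.npmjs.org/npm?write=true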
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
new file mode 100644
index 0000000000000..e0959f64ddf9d
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -0,0 +1,161 @@
+const CacheSemantics = require('http-cache-semantics')
+const Negotiator = require('negotiator')
+const ssri = require('ssri')
+
+// HACK: negotiator lazy loads several of its own modules
+// as a micro optimization. we need to be sure that they're
+// in memory as soon as possible at startup so that we do
+// not try to lazy load them after the directory has been
+// retired during a self update of the npm CLI. we do this
+// by calling all of the methods that trigger a lazy load
+// on a fake instance.
+const preloadNegotiator = new Negotiator({ headers: {} })
+preloadNegotiator.charsets()
+preloadNegotiator.encodings()
+preloadNegotiator.languages()
+preloadNegotiator.mediaTypes()
+
+// options passed to http-cache-semantics constructor
+const policyOptions = {
+ shared: false,
+ ignoreCargoCult: true,
+}
+
+// a fake empty response, used when only testing the
+// request for storability
+const emptyResponse = { status: 200, headers: {} }
+
+// returns a plain object representation of the Request
+const requestObject = (request) => {
+ const _obj = {
+ method: request.method,
+ url: request.url,
+ headers: {},
+ }
+
+ request.headers.forEach((value, key) => {
+ _obj.headers[key] = value
+ })
+
+ return _obj
+}
+
+// returns a plain object representation of the Response
+const responseObject = (response) => {
+ const _obj = {
+ status: response.status,
+ headers: {},
+ }
+
+ response.headers.forEach((value, key) => {
+ _obj.headers[key] = value
+ })
+
+ return _obj
+}
+
+class CachePolicy {
+ constructor ({ entry, request, response, options }) {
+ this.entry = entry
+ this.request = requestObject(request)
+ this.response = responseObject(response)
+ this.options = options
+ this.policy = new CacheSemantics(this.request, this.response, policyOptions)
+
+ if (this.entry) {
+ // if we have an entry, copy the timestamp to the _responseTime
+ // this is necessary because the CacheSemantics constructor forces
+ // the value to Date.now() which means a policy created from a
+ // cache entry is likely to always identify itself as stale
+ this.policy._responseTime = this.entry.metadata.time
+ }
+ }
+
+ // static method to quickly determine if a request alone is storable
+ static storable (request, options) {
+ // no cachePath means no caching
+ if (!options.cachePath)
+ return false
+
+ // user explicitly asked not to cache
+ if (options.cache === 'no-store')
+ return false
+
+ // we only cache GET and HEAD requests
+ if (!['GET', 'HEAD'].includes(request.method))
+ return false
+
+ // otherwise, let http-cache-semantics make the decision
+ // based on the request's headers
+ const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
+ return policy.storable()
+ }
+
+ // returns true if the policy satisfies the request
+ satisfies (request) {
+ const _req = requestObject(request)
+ if (this.request.headers.host !== _req.headers.host)
+ return false
+
+ const negotiatorA = new Negotiator(this.request)
+ const negotiatorB = new Negotiator(_req)
+
+ if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes()))
+ return false
+
+ if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages()))
+ return false
+
+ if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings()))
+ return false
+
+ if (this.options.integrity)
+ return ssri.parse(this.options.integrity).match(this.entry.integrity)
+
+ return true
+ }
+
+ // returns true if the request and response allow caching
+ storable () {
+ return this.policy.storable()
+ }
+
+  // NOTE: this is a hack to avoid parsing the cache-control
+  // header ourselves; it returns true if the response's
+  // cache-control contains must-revalidate
+ get mustRevalidate () {
+ return !!this.policy._rescc['must-revalidate']
+ }
+
+ // returns true if the cached response requires revalidation
+ // for the given request
+ needsRevalidation (request) {
+ const _req = requestObject(request)
+ // force method to GET because we only cache GETs
+ // but can serve a HEAD from a cached GET
+ _req.method = 'GET'
+ return !this.policy.satisfiesWithoutRevalidation(_req)
+ }
+
+ responseHeaders () {
+ return this.policy.responseHeaders()
+ }
+
+ // returns a new object containing the appropriate headers
+ // to send a revalidation request
+ revalidationHeaders (request) {
+ const _req = requestObject(request)
+ return this.policy.revalidationHeaders(_req)
+ }
+
+ // returns true if the request/response was revalidated
+ // successfully. returns false if a new response was received
+ revalidated (request, response) {
+ const _req = requestObject(request)
+ const _res = responseObject(response)
+ const policy = this.policy.revalidatedPolicy(_req, _res)
+ return !policy.modified
+ }
+}
+
+module.exports = CachePolicy
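
A rough sketch (not part of the patch) of what the static storable() gate above lets through, assuming the vendored require path shown in this diff; the cachePath value is a placeholder.

const CachePolicy = require('make-fetch-happen/lib/cache/policy.js')
const { Request } = require('minipass-fetch')

const opts = { cachePath: './my-cache', cache: 'default' }

// a plain GET with a cachePath is a caching candidate
CachePolicy.storable(new Request('https://registry.npmjs.org/npm'), opts) // true

// non-GET/HEAD methods and explicit 'no-store' are rejected before any header inspection
CachePolicy.storable(new Request('https://registry.npmjs.org/npm', { method: 'PUT' }), opts) // false
CachePolicy.storable(new Request('https://registry.npmjs.org/npm'), { ...opts, cache: 'no-store' }) // false
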
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
new file mode 100644
index 0000000000000..dfded79295da1
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
@@ -0,0 +1,100 @@
+'use strict'
+
+const { FetchError, Request, isRedirect } = require('minipass-fetch')
+const url = require('url')
+
+const CachePolicy = require('./cache/policy.js')
+const cache = require('./cache/index.js')
+const remote = require('./remote.js')
+
+// given a Request, a Response and user options
+// return true if the response is a redirect that
+// can be followed. we throw errors that will result
+// in the fetch being rejected if the redirect is
+// possible but invalid for some reason
+const canFollowRedirect = (request, response, options) => {
+ if (!isRedirect(response.status))
+ return false
+
+ if (options.redirect === 'manual')
+ return false
+
+ if (options.redirect === 'error')
+ throw new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect', { code: 'ENOREDIRECT' })
+
+ if (!response.headers.has('location'))
+ throw new FetchError(`redirect location header missing for: ${request.url}`, 'no-location', { code: 'EINVALIDREDIRECT' })
+
+ if (request.counter >= request.follow)
+ throw new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect', { code: 'EMAXREDIRECT' })
+
+ return true
+}
+
+// given a Request, a Response, and the user's options return an object
+// with a new Request and a new options object that will be used for
+// following the redirect
+const getRedirect = (request, response, options) => {
+ const _opts = { ...options }
+ const location = response.headers.get('location')
+ const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
+ // Comment below is used under the following license:
+ // Copyright (c) 2010-2012 Mikeal Rogers
+ // Licensed under the Apache License, Version 2.0 (the "License");
+ // you may not use this file except in compliance with the License.
+ // You may obtain a copy of the License at
+ // http://www.apache.org/licenses/LICENSE-2.0
+ // Unless required by applicable law or agreed to in writing,
+ // software distributed under the License is distributed on an "AS
+ // IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ // express or implied. See the License for the specific language
+ // governing permissions and limitations under the License.
+
+ // Remove authorization if changing hostnames (but not if just
+ // changing ports or protocols). This matches the behavior of request:
+ // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+ if (new url.URL(request.url).hostname !== redirectUrl.hostname)
+ request.headers.delete('authorization')
+
+ // for POST request with 301/302 response, or any request with 303 response,
+ // use GET when following redirect
+ if (response.status === 303 || (request.method === 'POST' && [301, 302].includes(response.status))) {
+ _opts.method = 'GET'
+ _opts.body = null
+ request.headers.delete('content-length')
+ }
+
+ _opts.headers = {}
+ request.headers.forEach((value, key) => {
+ _opts.headers[key] = value
+ })
+
+ _opts.counter = ++request.counter
+ const redirectReq = new Request(url.format(redirectUrl), _opts)
+ return {
+ request: redirectReq,
+ options: _opts,
+ }
+}
+
+const fetch = async (request, options) => {
+ const response = CachePolicy.storable(request, options)
+ ? await cache(request, options)
+ : await remote(request, options)
+
+ // if the request wasn't a GET or HEAD, and the response
+ // status is between 200 and 399 inclusive, invalidate the
+ // request url
+ if (!['GET', 'HEAD'].includes(request.method) &&
+ response.status >= 200 &&
+ response.status <= 399)
+ await cache.invalidate(request, options)
+
+ if (!canFollowRedirect(request, response, options))
+ return response
+
+ const redirect = getRedirect(request, response, options)
+ return fetch(redirect.request, redirect.options)
+}
+
+module.exports = fetch
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
new file mode 100644
index 0000000000000..6028bc0725129
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
@@ -0,0 +1,40 @@
+const { FetchError, Headers, Request, Response } = require('minipass-fetch')
+
+const configureOptions = require('./options.js')
+const fetch = require('./fetch.js')
+
+const makeFetchHappen = (url, opts) => {
+ const options = configureOptions(opts)
+
+ const request = new Request(url, options)
+ return fetch(request, options)
+}
+
+makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}) => {
+ if (typeof defaultUrl === 'object') {
+ defaultOptions = defaultUrl
+ defaultUrl = null
+ }
+
+ const defaultedFetch = (url, options = {}) => {
+ const finalUrl = url || defaultUrl
+ const finalOptions = {
+ ...defaultOptions,
+ ...options,
+ headers: {
+ ...defaultOptions.headers,
+ ...options.headers,
+ },
+ }
+ return makeFetchHappen(finalUrl, finalOptions)
+ }
+
+ defaultedFetch.defaults = makeFetchHappen.defaults
+ return defaultedFetch
+}
+
+module.exports = makeFetchHappen
+module.exports.FetchError = FetchError
+module.exports.Headers = Headers
+module.exports.Request = Request
+module.exports.Response = Response
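
A brief usage sketch of the defaults() helper above (not part of the patch); the cache directory and custom header are placeholders, and per-call options and headers are merged over the defaults:

const fetch = require('make-fetch-happen').defaults({
  cachePath: './my-cache',            // placeholder cache directory
  headers: { 'x-example': 'hello' },  // placeholder default header
})

fetch('https://registry.npmjs.org/npm', { headers: { accept: 'application/json' } })
  .then(res => res.json())
  .then(body => console.log(body.name)) // => 'npm'
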
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
new file mode 100644
index 0000000000000..f6138e6e1d13a
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
@@ -0,0 +1,44 @@
+const conditionalHeaders = [
+ 'if-modified-since',
+ 'if-none-match',
+ 'if-unmodified-since',
+ 'if-match',
+ 'if-range',
+]
+
+const configureOptions = (opts) => {
+ const {strictSSL, ...options} = { ...opts }
+ options.method = options.method ? options.method.toUpperCase() : 'GET'
+ options.rejectUnauthorized = strictSSL !== false
+
+ if (!options.retry)
+ options.retry = { retries: 0 }
+ else if (typeof options.retry === 'string') {
+ const retries = parseInt(options.retry, 10)
+ if (isFinite(retries))
+ options.retry = { retries }
+ else
+ options.retry = { retries: 0 }
+ } else if (typeof options.retry === 'number')
+ options.retry = { retries: options.retry }
+ else
+ options.retry = { retries: 0, ...options.retry }
+
+ options.cache = options.cache || 'default'
+ if (options.cache === 'default') {
+ const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
+ return conditionalHeaders.includes(name.toLowerCase())
+ })
+ if (hasConditionalHeader)
+ options.cache = 'no-store'
+ }
+
+ // cacheManager is deprecated, but if it's set and
+ // cachePath is not we should copy it to the new field
+ if (options.cacheManager && !options.cachePath)
+ options.cachePath = options.cacheManager
+
+ return options
+}
+
+module.exports = configureOptions
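
To make the normalization above concrete, a sketch (not part of the patch) of what configureOptions returns for a few inputs, assuming the vendored require path:

const configureOptions = require('make-fetch-happen/lib/options.js')

configureOptions({ retry: '3' })
// => { retry: { retries: 3 }, method: 'GET', rejectUnauthorized: true, cache: 'default' }

configureOptions({ method: 'post', strictSSL: false, retry: 5 })
// => { method: 'POST', rejectUnauthorized: false, retry: { retries: 5 }, cache: 'default' }

// a conditional header flips the default cache mode to 'no-store'
configureOptions({ headers: { 'if-none-match': '"abc"' } }).cache
// => 'no-store'
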
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
new file mode 100644
index 0000000000000..7e4ed24edb530
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
@@ -0,0 +1,102 @@
+const Minipass = require('minipass')
+const MinipassPipeline = require('minipass-pipeline')
+const fetch = require('minipass-fetch')
+const promiseRetry = require('promise-retry')
+const ssri = require('ssri')
+
+const getAgent = require('./agent.js')
+const pkg = require('../package.json')
+
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+ 'ECONNRESET', // remote socket closed on us
+ 'ECONNREFUSED', // remote host refused to open connection
+ 'EADDRINUSE', // failed to bind to a local port (proxy?)
+ 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+ 'ERR_SOCKET_TIMEOUT', // same as above, but this one comes from agentkeepalive
+ // Known codes we do NOT retry on:
+ // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+]
+
+const RETRY_TYPES = [
+ 'request-timeout',
+]
+
+// make a request directly to the remote source,
+// retrying certain classes of errors as well as
+// following redirects (through the cache if necessary)
+// and verifying response integrity
+const remoteFetch = (request, options) => {
+ const agent = getAgent(request.url, options)
+ if (!request.headers.has('connection'))
+ request.headers.set('connection', agent ? 'keep-alive' : 'close')
+
+ if (!request.headers.has('user-agent'))
+ request.headers.set('user-agent', USER_AGENT)
+
+ // keep our own options since we're overriding the agent
+ // and the redirect mode
+ const _opts = {
+ ...options,
+ agent,
+ redirect: 'manual',
+ }
+
+ return promiseRetry(async (retryHandler, attemptNum) => {
+ const req = new fetch.Request(request, _opts)
+ try {
+ let res = await fetch(req, _opts)
+ if (_opts.integrity && res.status === 200) {
+ // we got a 200 response and the user has specified an expected
+ // integrity value, so wrap the response in an ssri stream to verify it
+ const integrityStream = ssri.integrityStream({ integrity: _opts.integrity })
+ res = new fetch.Response(new MinipassPipeline(res.body, integrityStream), res)
+ }
+
+ res.headers.set('x-fetch-attempts', attemptNum)
+
+ // do not retry POST requests, or requests with a streaming body
+ // do retry requests with a 408, 420, 429 or 500+ status in the response
+ const isStream = Minipass.isStream(req.body)
+ const isRetriable = req.method !== 'POST' &&
+ !isStream &&
+ ([408, 420, 429].includes(res.status) || res.status >= 500)
+
+ if (isRetriable) {
+ if (typeof options.onRetry === 'function')
+ options.onRetry(res)
+
+ return retryHandler(res)
+ }
+
+ return res
+ } catch (err) {
+ const code = (err.code === 'EPROMISERETRY')
+ ? err.retried.code
+ : err.code
+
+ // err.retried will be the thing that was thrown from above
+ // if it's a response, we just got a bad status code and we
+ // can re-throw to allow the retry
+ const isRetryError = err.retried instanceof fetch.Response ||
+ (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
+
+ if (req.method === 'POST' || isRetryError)
+ throw err
+
+ if (typeof options.onRetry === 'function')
+ options.onRetry(err)
+
+ return retryHandler(err)
+ }
+ }, options.retry).catch((err) => {
+ // don't reject for http errors, just return them
+ if (err.status >= 400 && err.type !== 'system')
+ return err
+
+ throw err
+ })
+}
+
+module.exports = remoteFetch
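
The retry and onRetry options consumed above come through unchanged from the caller; a minimal sketch (not part of the patch), with an example endpoint:

const fetch = require('make-fetch-happen')

fetch('https://registry.npmjs.org/-/ping', {
  retry: { retries: 3, factor: 2, minTimeout: 1000 }, // forwarded to promise-retry
  onRetry: (resOrErr) => {
    // called with the failed Response or the network error before each retry
    console.warn('retrying:', resOrErr.status || resOrErr.code)
  },
}).then(res => console.log('final status:', res.status))
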
diff --git a/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
new file mode 100644
index 0000000000000..dae7b37da4069
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,76 @@
+{
+ "name": "make-fetch-happen",
+ "version": "9.1.0",
+ "description": "Opinionated, caching, retrying fetch client",
+ "main": "lib/index.js",
+ "files": [
+ "lib"
+ ],
+ "scripts": {
+ "preversion": "npm t",
+ "postversion": "npm publish",
+ "prepublishOnly": "git push --follow-tags",
+ "test": "tap",
+ "posttest": "npm run lint",
+ "eslint": "eslint",
+ "lint": "npm run eslint -- lib test",
+ "lintfix": "npm run lint -- --fix"
+ },
+ "repository": "https://github.com/npm/make-fetch-happen",
+ "keywords": [
+ "http",
+ "request",
+ "fetch",
+ "mean girls",
+ "caching",
+ "cache",
+ "subresource integrity"
+ ],
+ "author": {
+ "name": "Kat Marchán",
+ "email": "kzm@zkat.tech",
+ "twitter": "maybekatz"
+ },
+ "license": "ISC",
+ "dependencies": {
+ "agentkeepalive": "^4.1.3",
+ "cacache": "^15.2.0",
+ "http-cache-semantics": "^4.1.0",
+ "http-proxy-agent": "^4.0.1",
+ "https-proxy-agent": "^5.0.0",
+ "is-lambda": "^1.0.1",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.3",
+ "minipass-collect": "^1.0.2",
+ "minipass-fetch": "^1.3.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "negotiator": "^0.6.2",
+ "promise-retry": "^2.0.1",
+ "socks-proxy-agent": "^6.0.0",
+ "ssri": "^8.0.0"
+ },
+ "devDependencies": {
+ "eslint": "^7.26.0",
+ "eslint-plugin-import": "^2.23.2",
+ "eslint-plugin-node": "^11.1.0",
+ "eslint-plugin-promise": "^5.1.0",
+ "eslint-plugin-standard": "^5.0.0",
+ "mkdirp": "^1.0.4",
+ "nock": "^13.0.11",
+ "npmlog": "^5.0.0",
+ "require-inject": "^1.4.2",
+ "rimraf": "^3.0.2",
+ "safe-buffer": "^5.2.1",
+ "standard-version": "^9.3.0",
+ "tap": "^15.0.9"
+ },
+ "engines": {
+ "node": ">= 10"
+ },
+ "tap": {
+ "color": 1,
+ "files": "test/*.js",
+ "check-coverage": true
+ }
+}
diff --git a/node_modules/npm-profile/LICENSE b/node_modules/npm-profile/LICENSE
deleted file mode 100644
index 7953647e7760b..0000000000000
--- a/node_modules/npm-profile/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright npm, Inc
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/LICENSE.md b/node_modules/npm-profile/LICENSE.md
new file mode 100644
index 0000000000000..5fc208ff122e0
--- /dev/null
+++ b/node_modules/npm-profile/LICENSE.md
@@ -0,0 +1,20 @@
+
+
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/index.js b/node_modules/npm-profile/lib/index.js
similarity index 92%
rename from node_modules/npm-profile/index.js
rename to node_modules/npm-profile/lib/index.js
index 4f2a2ae7cc2ff..aa322e37f4824 100644
--- a/node_modules/npm-profile/index.js
+++ b/node_modules/npm-profile/lib/index.js
@@ -1,7 +1,7 @@
'use strict'
const fetch = require('npm-registry-fetch')
-const { HttpErrorBase } = require('npm-registry-fetch/errors.js')
+const { HttpErrorBase } = require('npm-registry-fetch/lib/errors')
const os = require('os')
const { URL } = require('url')
@@ -57,7 +57,7 @@ const webAuth = (opener, opts, body) => {
return fetch(target, {
...opts,
method: 'POST',
- body
+ body,
}).then(res => {
return Promise.all([res, res.json()])
}).then(([res, content]) => {
@@ -76,7 +76,7 @@ const webAuth = (opener, opts, body) => {
if ((er.statusCode >= 400 && er.statusCode <= 499) || er.statusCode === 500) {
throw new WebLoginNotSupported('POST', {
status: er.statusCode,
- headers: { raw: () => er.headers }
+ headers: { raw: () => er.headers },
}, er.body)
} else {
throw er
@@ -115,11 +115,11 @@ const adduserCouch = (username, email, password, opts = {}) => {
email: email,
type: 'user',
roles: [],
- date: new Date().toISOString()
+ date: new Date().toISOString(),
}
const logObj = {
...body,
- password: 'XXXXX'
+ password: 'XXXXX',
}
process.emit('log', 'verbose', 'adduser', 'before first PUT', logObj)
@@ -127,7 +127,7 @@ const adduserCouch = (username, email, password, opts = {}) => {
return fetch.json(target, {
...opts,
method: 'PUT',
- body
+ body,
}).then(result => {
result.username = username
return result
@@ -141,11 +141,11 @@ const loginCouch = (username, password, opts = {}) => {
password: password,
type: 'user',
roles: [],
- date: new Date().toISOString()
+ date: new Date().toISOString(),
}
const logObj = {
...body,
- password: 'XXXXX'
+ password: 'XXXXX',
}
process.emit('log', 'verbose', 'login', 'before first PUT', logObj)
@@ -153,16 +153,18 @@ const loginCouch = (username, password, opts = {}) => {
return fetch.json(target, {
...opts,
method: 'PUT',
- body
+ body,
}).catch(err => {
if (err.code === 'E400') {
err.message = `There is no user with the username "${username}".`
throw err
}
- if (err.code !== 'E409') throw err
+ if (err.code !== 'E409') {
+ throw err
+ }
return fetch.json(target, {
...opts,
- query: { write: true }
+ query: { write: true },
}).then(result => {
Object.keys(result).forEach(k => {
if (!body[k] || k === 'roles') {
@@ -177,8 +179,8 @@ const loginCouch = (username, password, opts = {}) => {
forceAuth: {
username,
password: Buffer.from(password, 'utf8').toString('base64'),
- otp
- }
+ otp,
+ },
})
})
}).then(result => {
@@ -192,12 +194,14 @@ const get = (opts = {}) => fetch.json('/-/npm/v1/user', opts)
const set = (profile, opts = {}) => {
Object.keys(profile).forEach(key => {
// profile keys can't be empty strings, but they CAN be null
- if (profile[key] === '') profile[key] = null
+ if (profile[key] === '') {
+ profile[key] = null
+ }
})
return fetch.json('/-/npm/v1/user', {
...opts,
method: 'POST',
- body: profile
+ body: profile,
})
}
@@ -220,7 +224,7 @@ const removeToken = (tokenKey, opts = {}) => {
return fetch(target, {
...opts,
method: 'DELETE',
- ignoreBody: true
+ ignoreBody: true,
}).then(() => null)
}
@@ -231,8 +235,8 @@ const createToken = (password, readonly, cidrs, opts = {}) => {
body: {
password: password,
readonly: readonly,
- cidr_whitelist: cidrs
- }
+ cidr_whitelist: cidrs,
+ },
})
}
@@ -267,5 +271,5 @@ module.exports = {
set,
listTokens,
removeToken,
- createToken
+ createToken,
}
diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json
index 43cc7c921bb04..8745c2559f33f 100644
--- a/node_modules/npm-profile/package.json
+++ b/node_modules/npm-profile/package.json
@@ -1,39 +1,45 @@
{
"name": "npm-profile",
- "version": "5.0.4",
+ "version": "6.0.0",
"description": "Library for updating an npmjs.com profile",
"keywords": [],
- "author": "Rebecca Turner (http://re-becca.org/)",
+ "author": "GitHub Inc.",
"license": "ISC",
"dependencies": {
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.0"
},
- "main": "index.js",
+ "main": "./lib/index.js",
"repository": {
"type": "git",
"url": "git+https://github.com/npm/npm-profile.git"
},
"files": [
- "index.js"
+ "bin",
+ "lib"
],
"devDependencies": {
- "nock": "^12.0.1",
- "require-inject": "^1.4.4",
- "standard": "^14.3.1",
- "tap": "^14.10.6"
+ "@npmcli/template-oss": "^2.5.1",
+ "nock": "^13.2.1",
+ "tap": "^15.1.5"
},
"scripts": {
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags",
- "posttest": "standard index.js",
+ "posttest": "npm run lint",
"test": "tap",
- "snap": "tap"
+ "snap": "tap",
+ "lint": "eslint '**/*.js'",
+ "postlint": "npm-template-check",
+ "lintfix": "npm run lint -- --fix"
},
"tap": {
"check-coverage": true
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
+ },
+ "templateOSS": {
+ "version": "2.5.1"
}
}
diff --git a/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-registry-fetch/LICENSE.md
index 8d28acf866d93..5fc208ff122e0 100644
--- a/node_modules/npm-registry-fetch/LICENSE.md
+++ b/node_modules/npm-registry-fetch/LICENSE.md
@@ -1,16 +1,20 @@
+
+
ISC License
-Copyright (c) npm, Inc.
+Copyright npm, Inc.
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/auth.js b/node_modules/npm-registry-fetch/lib/auth.js
similarity index 94%
rename from node_modules/npm-registry-fetch/auth.js
rename to node_modules/npm-registry-fetch/lib/auth.js
index 01a4436a5d2a8..e6b50b12eb207 100644
--- a/node_modules/npm-registry-fetch/auth.js
+++ b/node_modules/npm-registry-fetch/lib/auth.js
@@ -12,8 +12,9 @@ const regKeyFromURI = (uri, opts) => {
let regKey = `//${parsed.host}${parsed.pathname}`
while (regKey.length > '//'.length) {
// got some auth for this URI
- if (hasAuth(regKey, opts))
+ if (hasAuth(regKey, opts)) {
return regKey
+ }
// can be either //host/some/path/:_auth or //host/some/path:_auth
// walk up by removing EITHER what's after the slash OR the slash itself
@@ -44,8 +45,9 @@ const getRegistry = opts => {
const getAuth = (uri, opts = {}) => {
const { forceAuth } = opts
- if (!uri)
+ if (!uri) {
throw new Error('URI is required')
+ }
const regKey = regKeyFromURI(uri, forceAuth || opts)
// we are only allowed to use what's in forceAuth if specified
@@ -62,9 +64,9 @@ const getAuth = (uri, opts = {}) => {
// no auth for this URI, but might have it for the registry
if (!regKey) {
const registry = getRegistry(opts)
- if (registry && uri !== registry && sameHost(uri, registry))
+ if (registry && uri !== registry && sameHost(uri, registry)) {
return getAuth(registry, opts)
- else if (registry !== opts.registry) {
+ } else if (registry !== opts.registry) {
// If making a tarball request to a different base URI than the
// registry where we logged in, but the same auth SHOULD be sent
// to that artifact host, then we track where it was coming in from,
@@ -96,11 +98,11 @@ class Auth {
this.token = null
this.auth = null
this.isBasicAuth = false
- if (token)
+ if (token) {
this.token = token
- else if (auth)
+ } else if (auth) {
this.auth = auth
- else if (username && password) {
+ } else if (username && password) {
const p = Buffer.from(password, 'base64').toString('utf8')
this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
this.isBasicAuth = true
diff --git a/node_modules/npm-registry-fetch/check-response.js b/node_modules/npm-registry-fetch/lib/check-response.js
similarity index 89%
rename from node_modules/npm-registry-fetch/check-response.js
rename to node_modules/npm-registry-fetch/lib/check-response.js
index 8bd85661ee8ca..26043a96de854 100644
--- a/node_modules/npm-registry-fetch/check-response.js
+++ b/node_modules/npm-registry-fetch/lib/check-response.js
@@ -4,11 +4,14 @@ const errors = require('./errors.js')
const { Response } = require('minipass-fetch')
const defaultOpts = require('./default-opts.js')
+/* eslint-disable-next-line max-len */
+const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
const checkResponse =
async ({ method, uri, res, registry, startTime, auth, opts }) => {
opts = { ...defaultOpts, ...opts }
- if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache'))
+ if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
opts.log.notice('', res.headers.get('npm-notice'))
+ }
if (res.status >= 400) {
logRequest(method, res, startTime, opts)
@@ -21,7 +24,7 @@ const checkResponse =
URI: ${uri}
Scoped Registry Key: ${auth.scopeAuthKey}
-More info here: https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry`)
+More info here: ${moreInfoUrl}`)
}
return checkErrors(method, res, startTime, opts)
} else {
@@ -46,8 +49,9 @@ function logRequest (method, res, startTime, opts) {
try {
const { URL } = require('url')
const url = new URL(res.url)
- if (url.password)
+ if (url.password) {
url.password = '***'
+ }
urlStr = url.toString()
} catch (er) {
@@ -85,7 +89,11 @@ function checkErrors (method, res, startTime, opts) {
method, res, parsed, opts.spec
)
}
- } else if (res.status === 401 && body != null && /one-time pass/.test(body.toString('utf8'))) {
+ } else if (
+ res.status === 401 &&
+ body != null &&
+ /one-time pass/.test(body.toString('utf8'))
+ ) {
// Heuristic for malformed OTP responses that don't include the
// www-authenticate header.
throw new errors.HttpErrorAuthOTP(
diff --git a/node_modules/npm-registry-fetch/default-opts.js b/node_modules/npm-registry-fetch/lib/default-opts.js
similarity index 90%
rename from node_modules/npm-registry-fetch/default-opts.js
rename to node_modules/npm-registry-fetch/lib/default-opts.js
index 9ca3f97d0352e..e8e8221da4a58 100644
--- a/node_modules/npm-registry-fetch/default-opts.js
+++ b/node_modules/npm-registry-fetch/lib/default-opts.js
@@ -1,4 +1,4 @@
-const pkg = require('./package.json')
+const pkg = require('../package.json')
module.exports = {
log: require('./silentlog.js'),
maxSockets: 12,
diff --git a/node_modules/npm-registry-fetch/errors.js b/node_modules/npm-registry-fetch/lib/errors.js
similarity index 97%
rename from node_modules/npm-registry-fetch/errors.js
rename to node_modules/npm-registry-fetch/lib/errors.js
index e65e5fbd80dda..0efc923e3e900 100644
--- a/node_modules/npm-registry-fetch/errors.js
+++ b/node_modules/npm-registry-fetch/lib/errors.js
@@ -8,10 +8,11 @@ function packageName (href) {
if (!basePath.match(/^-/)) {
basePath = basePath.split('/')
var index = basePath.indexOf('_rewrite')
- if (index === -1)
+ if (index === -1) {
index = basePath.length - 1
- else
+ } else {
index++
+ }
return decodeURIComponent(basePath[index])
}
} catch (_) {
diff --git a/node_modules/npm-registry-fetch/index.js b/node_modules/npm-registry-fetch/lib/index.js
similarity index 87%
rename from node_modules/npm-registry-fetch/index.js
rename to node_modules/npm-registry-fetch/lib/index.js
index 35fab75bcade9..19c921403e5cd 100644
--- a/node_modules/npm-registry-fetch/index.js
+++ b/node_modules/npm-registry-fetch/lib/index.js
@@ -60,11 +60,14 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
typeof body === 'object' &&
typeof body.then === 'function'
- if (body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)) {
+ if (
+ body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)
+ ) {
headers['content-type'] = headers['content-type'] || 'application/json'
body = JSON.stringify(body)
- } else if (body && !headers['content-type'])
+ } else if (body && !headers['content-type']) {
headers['content-type'] = 'application/octet-stream'
+ }
if (opts.gzip) {
headers['content-encoding'] = 'gzip'
@@ -73,8 +76,9 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
body.on('error', /* istanbul ignore next: unlikely and hard to test */
err => gz.emit('error', err))
body = body.pipe(gz)
- } else if (!bodyIsPromise)
+ } else if (!bodyIsPromise) {
body = new zlib.Gzip().end(body).concat()
+ }
}
const parsed = new url.URL(uri)
@@ -84,8 +88,9 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
: opts.query
Object.keys(q).forEach(key => {
- if (q[key] !== undefined)
+ if (q[key] !== undefined) {
parsed.searchParams.set(key, q[key])
+ }
})
uri = url.format(parsed)
}
@@ -105,7 +110,7 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
algorithms: opts.algorithms,
body,
cache: getCacheMode(opts),
- cacheManager: opts.cache,
+ cachePath: opts.cache,
ca: opts.ca,
cert: opts.cert,
headers,
@@ -138,17 +143,24 @@ function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
if (typeof opts.otpPrompt === 'function') {
return p.catch(async er => {
if (er instanceof HttpErrorAuthOTP) {
+ let otp
// if otp fails to complete, we fail with that failure
- const otp = await opts.otpPrompt()
- // if no otp provided, throw the original HTTP error
- if (!otp)
+ try {
+ otp = await opts.otpPrompt()
+ } catch (_) {
+ // ignore this error
+ }
+ // if no otp provided, or otpPrompt errored, throw the original HTTP error
+ if (!otp) {
throw er
+ }
return regFetch(uri, { ...opts, otp })
}
throw er
})
- } else
+ } else {
return p
+ }
}
return Promise.resolve(body).then(doFetch)
@@ -178,11 +190,13 @@ function pickRegistry (spec, opts = {}) {
let registry = spec.scope &&
opts[spec.scope.replace(/^@?/, '@') + ':registry']
- if (!registry && opts.scope)
+ if (!registry && opts.scope) {
registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
+ }
- if (!registry)
+ if (!registry) {
registry = opts.registry || defaultOpts.registry
+ }
return registry
}
@@ -199,24 +213,29 @@ function getHeaders (uri, auth, opts) {
'user-agent': opts.userAgent,
}, opts.headers || {})
- if (opts.projectScope)
- headers['npm-scope'] = opts.projectScope
+ if (opts.scope) {
+ headers['npm-scope'] = opts.scope
+ }
- if (opts.npmSession)
+ if (opts.npmSession) {
headers['npm-session'] = opts.npmSession
+ }
- if (opts.npmCommand)
+ if (opts.npmCommand) {
headers['npm-command'] = opts.npmCommand
+ }
// If a tarball is hosted on a different place than the manifest, only send
// credentials on `alwaysAuth`
- if (auth.token)
+ if (auth.token) {
headers.authorization = `Bearer ${auth.token}`
- else if (auth.auth)
+ } else if (auth.auth) {
headers.authorization = `Basic ${auth.auth}`
+ }
- if (opts.otp)
+ if (opts.otp) {
headers['npm-otp'] = opts.otp
+ }
return headers
}
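
The otpPrompt change above means a prompt that throws or returns nothing now surfaces the original HTTP error instead of masking it; a sketch (not part of the patch) of supplying one, with a hypothetical hard-coded OTP:

const regFetch = require('npm-registry-fetch')

const opts = {
  registry: 'https://registry.npmjs.org/',
  // invoked when the registry rejects a request with an OTP challenge;
  // returning a falsy value (or throwing) re-throws the original error
  otpPrompt: async () => '123456', // hypothetical one-time password
}

// the request is retried once with the prompted OTP attached
regFetch.json('/-/npm/v1/user', opts).then(profile => console.log(profile))
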
diff --git a/node_modules/npm-registry-fetch/silentlog.js b/node_modules/npm-registry-fetch/lib/silentlog.js
similarity index 100%
rename from node_modules/npm-registry-fetch/silentlog.js
rename to node_modules/npm-registry-fetch/lib/silentlog.js
diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json
index e4eaabaa5b09a..ff4482b1fdc9e 100644
--- a/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-registry-fetch/package.json
@@ -1,22 +1,25 @@
{
"name": "npm-registry-fetch",
- "version": "11.0.0",
+ "version": "12.0.1",
"description": "Fetch-based http client for use with npm registry APIs",
- "main": "index.js",
+ "main": "lib",
"files": [
- "*.js"
+ "bin",
+ "lib"
],
"scripts": {
"eslint": "eslint",
- "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"",
+ "lint": "eslint '**/*.js'",
"lintfix": "npm run lint -- --fix",
"prepublishOnly": "git push origin --follow-tags",
"preversion": "npm test",
"postversion": "npm publish",
"test": "tap",
- "posttest": "npm run lint --",
+ "posttest": "npm run lint",
"npmclilint": "npmcli-lint",
- "postsnap": "npm run lintfix --"
+ "postsnap": "npm run lintfix --",
+ "postlint": "npm-template-check",
+ "snap": "tap"
},
"repository": "https://github.com/npm/npm-registry-fetch",
"keywords": [
@@ -24,14 +27,10 @@
"registry",
"fetch"
],
- "author": {
- "name": "Kat Marchán",
- "email": "kzm@sykosomatic.org",
- "twitter": "maybekatz"
- },
+ "author": "GitHub Inc.",
"license": "ISC",
"dependencies": {
- "make-fetch-happen": "^9.0.1",
+ "make-fetch-happen": "^10.0.0",
"minipass": "^3.1.3",
"minipass-fetch": "^1.3.0",
"minipass-json-stream": "^1.0.1",
@@ -39,7 +38,7 @@
"npm-package-arg": "^8.0.0"
},
"devDependencies": {
- "@npmcli/lint": "^1.0.1",
+ "@npmcli/template-oss": "^2.5.1",
"cacache": "^15.0.0",
"nock": "^13.1.0",
"npmlog": "^4.1.2",
@@ -52,6 +51,9 @@
"test-ignore": "test[\\\\/](util|cache)[\\\\/]"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
+ },
+ "templateOSS": {
+ "version": "2.5.1"
}
}
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index d0fe0a065b414..ffbc067f68dee 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
{
"name": "pacote",
- "version": "12.0.2",
+ "version": "12.0.3",
"description": "JavaScript package downloader",
"author": "Isaac Z. Schlueter (https://izs.me)",
"bin": {
@@ -46,7 +46,7 @@
"npm-package-arg": "^8.0.1",
"npm-packlist": "^3.0.0",
"npm-pick-manifest": "^6.0.0",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.0",
"promise-retry": "^2.0.1",
"read-package-json-fast": "^2.0.1",
"rimraf": "^3.0.2",
diff --git a/package-lock.json b/package-lock.json
index f38e1e4f75c63..8b2321ecce27f 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "npm",
- "version": "8.3.2",
+ "version": "8.4.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "npm",
- "version": "8.3.2",
+ "version": "8.4.0",
"bundleDependencies": [
"@isaacs/string-locale-compare",
"@npmcli/arborist",
@@ -87,7 +87,7 @@
],
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
- "@npmcli/arborist": "^4.2.1",
+ "@npmcli/arborist": "^4.3.0",
"@npmcli/ci-detect": "^1.4.0",
"@npmcli/config": "^2.4.0",
"@npmcli/map-workspaces": "^2.0.0",
@@ -111,18 +111,18 @@
"init-package-json": "^2.0.5",
"is-cidr": "^4.0.2",
"json-parse-even-better-errors": "^2.3.1",
- "libnpmaccess": "^5.0.0",
+ "libnpmaccess": "^5.0.1",
"libnpmdiff": "^3.0.0",
- "libnpmexec": "^3.0.2",
+ "libnpmexec": "^3.0.3",
"libnpmfund": "^2.0.2",
- "libnpmhook": "^7.0.0",
- "libnpmorg": "^3.0.0",
+ "libnpmhook": "^7.0.1",
+ "libnpmorg": "^3.0.1",
"libnpmpack": "^3.0.1",
- "libnpmpublish": "^5.0.0",
- "libnpmsearch": "^4.0.0",
- "libnpmteam": "^3.0.0",
+ "libnpmpublish": "^5.0.1",
+ "libnpmsearch": "^4.0.1",
+ "libnpmteam": "^3.0.1",
"libnpmversion": "^2.0.2",
- "make-fetch-happen": "^9.1.0",
+ "make-fetch-happen": "^10.0.0",
"minipass": "^3.1.6",
"minipass-pipeline": "^1.2.4",
"mkdirp": "^1.0.4",
@@ -134,12 +134,12 @@
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.1",
- "npm-profile": "^5.0.3",
- "npm-registry-fetch": "^11.0.0",
+ "npm-profile": "^6.0.0",
+ "npm-registry-fetch": "^12.0.1",
"npm-user-validate": "^1.0.1",
"npmlog": "^6.0.0",
"opener": "^1.5.2",
- "pacote": "^12.0.2",
+ "pacote": "^12.0.3",
"parse-conflict-json": "^2.0.1",
"proc-log": "^1.0.0",
"qrcode-terminal": "^0.12.0",
@@ -1008,12 +1008,12 @@
}
},
"node_modules/@tootallnate/once": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
- "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==",
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
+ "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
"inBundle": true,
"engines": {
- "node": ">= 6"
+ "node": ">= 10"
}
},
"node_modules/@types/hast": {
@@ -3859,12 +3859,12 @@
"inBundle": true
},
"node_modules/http-proxy-agent": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
- "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
+ "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
"inBundle": true,
"dependencies": {
- "@tootallnate/once": "1",
+ "@tootallnate/once": "2",
"agent-base": "6",
"debug": "4"
},
@@ -4669,15 +4669,6 @@
}
}
},
- "node_modules/jsdom/node_modules/@tootallnate/once": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
- "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
- "dev": true,
- "engines": {
- "node": ">= 10"
- }
- },
"node_modules/jsdom/node_modules/acorn": {
"version": "8.5.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz",
@@ -4690,20 +4681,6 @@
"node": ">=0.4.0"
}
},
- "node_modules/jsdom/node_modules/http-proxy-agent": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
- "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
- "dev": true,
- "dependencies": {
- "@tootallnate/once": "2",
- "agent-base": "6",
- "debug": "4"
- },
- "engines": {
- "node": ">= 6"
- }
- },
"node_modules/jsesc": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz",
@@ -5039,15 +5016,15 @@
"peer": true
},
"node_modules/make-fetch-happen": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz",
- "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==",
+ "version": "10.0.0",
+ "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.0.tgz",
+ "integrity": "sha512-CREcDkbKZZ64g5MN1FT+u58mDHX9FQFFtFyio5HonX44BdQdytqPZBXUz+6ibi2w/6ncji59f2phyXGSMGpgzA==",
"inBundle": true,
"dependencies": {
"agentkeepalive": "^4.1.3",
"cacache": "^15.2.0",
"http-cache-semantics": "^4.1.0",
- "http-proxy-agent": "^4.0.1",
+ "http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
"is-lambda": "^1.0.1",
"lru-cache": "^6.0.0",
@@ -5056,13 +5033,13 @@
"minipass-fetch": "^1.3.2",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
- "negotiator": "^0.6.2",
+ "negotiator": "^0.6.3",
"promise-retry": "^2.0.1",
"socks-proxy-agent": "^6.0.0",
"ssri": "^8.0.0"
},
"engines": {
- "node": ">= 10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/markdown-escapes": {
@@ -5366,9 +5343,9 @@
"dev": true
},
"node_modules/negotiator": {
- "version": "0.6.2",
- "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz",
- "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==",
+ "version": "0.6.3",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
+ "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==",
"inBundle": true,
"engines": {
"node": ">= 0.6"
@@ -5437,6 +5414,56 @@
"node": ">= 10.12.0"
}
},
+ "node_modules/node-gyp/node_modules/@tootallnate/once": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
+ "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==",
+ "inBundle": true,
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/node-gyp/node_modules/http-proxy-agent": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
+ "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
+ "inBundle": true,
+ "dependencies": {
+ "@tootallnate/once": "1",
+ "agent-base": "6",
+ "debug": "4"
+ },
+ "engines": {
+ "node": ">= 6"
+ }
+ },
+ "node_modules/node-gyp/node_modules/make-fetch-happen": {
+ "version": "9.1.0",
+ "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz",
+ "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==",
+ "inBundle": true,
+ "dependencies": {
+ "agentkeepalive": "^4.1.3",
+ "cacache": "^15.2.0",
+ "http-cache-semantics": "^4.1.0",
+ "http-proxy-agent": "^4.0.1",
+ "https-proxy-agent": "^5.0.0",
+ "is-lambda": "^1.0.1",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.3",
+ "minipass-collect": "^1.0.2",
+ "minipass-fetch": "^1.3.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "negotiator": "^0.6.2",
+ "promise-retry": "^2.0.1",
+ "socks-proxy-agent": "^6.0.0",
+ "ssri": "^8.0.0"
+ },
+ "engines": {
+ "node": ">= 10"
+ }
+ },
"node_modules/node-modules-regexp": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz",
@@ -5595,24 +5622,24 @@
}
},
"node_modules/npm-profile": {
- "version": "5.0.4",
- "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-5.0.4.tgz",
- "integrity": "sha512-OKtU7yoAEBOnc8zJ+/uo5E4ugPp09sopo+6y1njPp+W99P8DvQon3BJYmpvyK2Bf1+3YV5LN1bvgXRoZ1LUJBA==",
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-6.0.0.tgz",
+ "integrity": "sha512-ZiwXx3Fq3wNqJMz+d4AVZ9X4xAw/0TqsvdiyDF5Y+h9lQ7AjYDOxY06lj7AP5vUBqB/7k/0oNFQOx03Ay0iHPw==",
"inBundle": true,
"dependencies": {
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.0"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/npm-registry-fetch": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-11.0.0.tgz",
- "integrity": "sha512-jmlgSxoDNuhAtxUIG6pVwwtz840i994dL14FoNVZisrmZW5kWd63IUTNv1m/hyRSGSqWjCUp/YZlS1BJyNp9XA==",
+ "version": "12.0.1",
+ "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-12.0.1.tgz",
+ "integrity": "sha512-ricy4ezH3Uv0d4am6RSwHjCYTWJI74NJjurIigWMAG7Vs3PFyd0TUlkrez5L0AgaPzDLRsEzqb5cOZ/Ue01bmA==",
"inBundle": true,
"dependencies": {
- "make-fetch-happen": "^9.0.1",
+ "make-fetch-happen": "^10.0.0",
"minipass": "^3.1.3",
"minipass-fetch": "^1.3.0",
"minipass-json-stream": "^1.0.1",
@@ -5620,7 +5647,7 @@
"npm-package-arg": "^8.0.0"
},
"engines": {
- "node": ">=10"
+ "node": "^12.13.0 || ^14.15.0 || >=16"
}
},
"node_modules/npm-user-validate": {
@@ -5968,9 +5995,9 @@
}
},
"node_modules/pacote": {
- "version": "12.0.2",
- "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.2.tgz",
- "integrity": "sha512-Ar3mhjcxhMzk+OVZ8pbnXdb0l8+pimvlsqBGRNkble2NVgyqOGE3yrCGi/lAYq7E7NRDMz89R1Wx5HIMCGgeYg==",
+ "version": "12.0.3",
+ "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.3.tgz",
+ "integrity": "sha512-CdYEl03JDrRO3x18uHjBYA9TyoW8gy+ThVcypcDkxPtKlw76e4ejhYB6i9lJ+/cebbjpqPW/CijjqxwDTts8Ow==",
"inBundle": true,
"dependencies": {
"@npmcli/git": "^2.1.0",
@@ -5986,7 +6013,7 @@
"npm-package-arg": "^8.0.1",
"npm-packlist": "^3.0.0",
"npm-pick-manifest": "^6.0.0",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.0",
"promise-retry": "^2.0.1",
"read-package-json-fast": "^2.0.1",
"rimraf": "^3.0.2",
@@ -10482,7 +10509,7 @@
},
"workspaces/arborist": {
"name": "@npmcli/arborist",
- "version": "4.2.1",
+ "version": "4.3.0",
"license": "ISC",
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
@@ -10504,7 +10531,7 @@
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.0",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"pacote": "^12.0.2",
"parse-conflict-json": "^2.0.1",
"proc-log": "^1.0.0",
@@ -10550,13 +10577,13 @@
}
},
"workspaces/libnpmaccess": {
- "version": "5.0.0",
+ "version": "5.0.1",
"license": "ISC",
"dependencies": {
"aproba": "^2.0.0",
"minipass": "^3.1.1",
"npm-package-arg": "^8.1.2",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"devDependencies": {
"@npmcli/template-oss": "^2.4.2",
@@ -10590,7 +10617,7 @@
}
},
"workspaces/libnpmexec": {
- "version": "3.0.2",
+ "version": "3.0.3",
"license": "ISC",
"dependencies": {
"@npmcli/arborist": "^4.0.0",
@@ -10629,11 +10656,11 @@
}
},
"workspaces/libnpmhook": {
- "version": "7.0.0",
+ "version": "7.0.1",
"license": "ISC",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"devDependencies": {
"@npmcli/template-oss": "^2.4.2",
@@ -10704,11 +10731,11 @@
}
},
"workspaces/libnpmorg": {
- "version": "3.0.0",
+ "version": "3.0.1",
"license": "ISC",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"devDependencies": {
"@npmcli/template-oss": "^2.4.2",
@@ -10753,12 +10780,12 @@
}
},
"workspaces/libnpmpublish": {
- "version": "5.0.0",
+ "version": "5.0.1",
"license": "ISC",
"dependencies": {
"normalize-package-data": "^3.0.2",
"npm-package-arg": "^8.1.2",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"semver": "^7.1.3",
"ssri": "^8.0.1"
},
@@ -10774,10 +10801,10 @@
}
},
"workspaces/libnpmsearch": {
- "version": "4.0.0",
+ "version": "4.0.1",
"license": "ISC",
"dependencies": {
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"devDependencies": {
"@npmcli/template-oss": "^2.4.2",
@@ -10848,11 +10875,11 @@
}
},
"workspaces/libnpmteam": {
- "version": "3.0.0",
+ "version": "3.0.1",
"license": "ISC",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"devDependencies": {
"@npmcli/template-oss": "^2.4.2",
@@ -11420,7 +11447,7 @@
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.0",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"pacote": "^12.0.2",
"parse-conflict-json": "^2.0.1",
"proc-log": "^1.0.0",
@@ -11600,9 +11627,9 @@
}
},
"@tootallnate/once": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
- "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw=="
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
+ "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A=="
},
"@types/hast": {
"version": "2.3.1",
@@ -13726,11 +13753,11 @@
"integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ=="
},
"http-proxy-agent": {
- "version": "4.0.1",
- "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
- "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
+ "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
"requires": {
- "@tootallnate/once": "1",
+ "@tootallnate/once": "2",
"agent-base": "6",
"debug": "4"
}
@@ -14284,28 +14311,11 @@
"xml-name-validator": "^4.0.0"
},
"dependencies": {
- "@tootallnate/once": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-2.0.0.tgz",
- "integrity": "sha512-XCuKFP5PS55gnMVu3dty8KPatLqUoy/ZYzDzAGCQ8JNFCkLXzmI7vNHCR+XpbZaMWQK/vQubr7PkYq8g470J/A==",
- "dev": true
- },
"acorn": {
"version": "8.5.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.5.0.tgz",
"integrity": "sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==",
"dev": true
- },
- "http-proxy-agent": {
- "version": "5.0.0",
- "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-5.0.0.tgz",
- "integrity": "sha512-n2hY8YdoRE1i7r6M0w9DIw5GgZN0G25P8zLCRQ8rjXtTU3vsNFBI/vWK/UIeE6g5MUUz6avwAPXmL6Fy9D/90w==",
- "dev": true,
- "requires": {
- "@tootallnate/once": "2",
- "agent-base": "6",
- "debug": "4"
- }
}
}
},
@@ -14415,7 +14425,7 @@
"minipass": "^3.1.1",
"nock": "^12.0.1",
"npm-package-arg": "^8.1.2",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"tap": "^15.1.0"
}
},
@@ -14468,7 +14478,7 @@
"@npmcli/template-oss": "^2.4.2",
"aproba": "^2.0.0",
"nock": "^9.6.1",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"tap": "^15.1.0"
},
"dependencies": {
@@ -14528,7 +14538,7 @@
"aproba": "^2.0.0",
"minipass": "^3.1.1",
"nock": "^12.0.1",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"tap": "^15.0.0"
}
},
@@ -14566,7 +14576,7 @@
"nock": "^12.0.2",
"normalize-package-data": "^3.0.2",
"npm-package-arg": "^8.1.2",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"semver": "^7.1.3",
"ssri": "^8.0.1",
"tap": "^15"
@@ -14577,7 +14587,7 @@
"requires": {
"@npmcli/template-oss": "^2.4.2",
"nock": "^9.6.1",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"tap": "^15"
},
"dependencies": {
@@ -14636,7 +14646,7 @@
"@npmcli/template-oss": "^2.4.2",
"aproba": "^2.0.0",
"nock": "^12.0.1",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"tap": "^15"
}
},
@@ -14790,14 +14800,14 @@
"peer": true
},
"make-fetch-happen": {
- "version": "9.1.0",
- "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz",
- "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==",
+ "version": "10.0.0",
+ "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-10.0.0.tgz",
+ "integrity": "sha512-CREcDkbKZZ64g5MN1FT+u58mDHX9FQFFtFyio5HonX44BdQdytqPZBXUz+6ibi2w/6ncji59f2phyXGSMGpgzA==",
"requires": {
"agentkeepalive": "^4.1.3",
"cacache": "^15.2.0",
"http-cache-semantics": "^4.1.0",
- "http-proxy-agent": "^4.0.1",
+ "http-proxy-agent": "^5.0.0",
"https-proxy-agent": "^5.0.0",
"is-lambda": "^1.0.1",
"lru-cache": "^6.0.0",
@@ -14806,7 +14816,7 @@
"minipass-fetch": "^1.3.2",
"minipass-flush": "^1.0.5",
"minipass-pipeline": "^1.2.4",
- "negotiator": "^0.6.2",
+ "negotiator": "^0.6.3",
"promise-retry": "^2.0.1",
"socks-proxy-agent": "^6.0.0",
"ssri": "^8.0.0"
@@ -15026,9 +15036,9 @@
"dev": true
},
"negotiator": {
- "version": "0.6.2",
- "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz",
- "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw=="
+ "version": "0.6.3",
+ "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz",
+ "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg=="
},
"nock": {
"version": "12.0.3",
@@ -15080,6 +15090,46 @@
"semver": "^7.3.5",
"tar": "^6.1.2",
"which": "^2.0.2"
+ },
+ "dependencies": {
+ "@tootallnate/once": {
+ "version": "1.1.2",
+ "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz",
+ "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw=="
+ },
+ "http-proxy-agent": {
+ "version": "4.0.1",
+ "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz",
+ "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==",
+ "requires": {
+ "@tootallnate/once": "1",
+ "agent-base": "6",
+ "debug": "4"
+ }
+ },
+ "make-fetch-happen": {
+ "version": "9.1.0",
+ "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz",
+ "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==",
+ "requires": {
+ "agentkeepalive": "^4.1.3",
+ "cacache": "^15.2.0",
+ "http-cache-semantics": "^4.1.0",
+ "http-proxy-agent": "^4.0.1",
+ "https-proxy-agent": "^5.0.0",
+ "is-lambda": "^1.0.1",
+ "lru-cache": "^6.0.0",
+ "minipass": "^3.1.3",
+ "minipass-collect": "^1.0.2",
+ "minipass-fetch": "^1.3.2",
+ "minipass-flush": "^1.0.5",
+ "minipass-pipeline": "^1.2.4",
+ "negotiator": "^0.6.2",
+ "promise-retry": "^2.0.1",
+ "socks-proxy-agent": "^6.0.0",
+ "ssri": "^8.0.0"
+ }
+ }
}
},
"node-modules-regexp": {
@@ -15198,19 +15248,19 @@
}
},
"npm-profile": {
- "version": "5.0.4",
- "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-5.0.4.tgz",
- "integrity": "sha512-OKtU7yoAEBOnc8zJ+/uo5E4ugPp09sopo+6y1njPp+W99P8DvQon3BJYmpvyK2Bf1+3YV5LN1bvgXRoZ1LUJBA==",
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-6.0.0.tgz",
+ "integrity": "sha512-ZiwXx3Fq3wNqJMz+d4AVZ9X4xAw/0TqsvdiyDF5Y+h9lQ7AjYDOxY06lj7AP5vUBqB/7k/0oNFQOx03Ay0iHPw==",
"requires": {
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.0"
}
},
"npm-registry-fetch": {
- "version": "11.0.0",
- "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-11.0.0.tgz",
- "integrity": "sha512-jmlgSxoDNuhAtxUIG6pVwwtz840i994dL14FoNVZisrmZW5kWd63IUTNv1m/hyRSGSqWjCUp/YZlS1BJyNp9XA==",
+ "version": "12.0.1",
+ "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-12.0.1.tgz",
+ "integrity": "sha512-ricy4ezH3Uv0d4am6RSwHjCYTWJI74NJjurIigWMAG7Vs3PFyd0TUlkrez5L0AgaPzDLRsEzqb5cOZ/Ue01bmA==",
"requires": {
- "make-fetch-happen": "^9.0.1",
+ "make-fetch-happen": "^10.0.0",
"minipass": "^3.1.3",
"minipass-fetch": "^1.3.0",
"minipass-json-stream": "^1.0.1",
@@ -15472,9 +15522,9 @@
}
},
"pacote": {
- "version": "12.0.2",
- "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.2.tgz",
- "integrity": "sha512-Ar3mhjcxhMzk+OVZ8pbnXdb0l8+pimvlsqBGRNkble2NVgyqOGE3yrCGi/lAYq7E7NRDMz89R1Wx5HIMCGgeYg==",
+ "version": "12.0.3",
+ "resolved": "https://registry.npmjs.org/pacote/-/pacote-12.0.3.tgz",
+ "integrity": "sha512-CdYEl03JDrRO3x18uHjBYA9TyoW8gy+ThVcypcDkxPtKlw76e4ejhYB6i9lJ+/cebbjpqPW/CijjqxwDTts8Ow==",
"requires": {
"@npmcli/git": "^2.1.0",
"@npmcli/installed-package-contents": "^1.0.6",
@@ -15489,7 +15539,7 @@
"npm-package-arg": "^8.0.1",
"npm-packlist": "^3.0.0",
"npm-pick-manifest": "^6.0.0",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.0",
"promise-retry": "^2.0.1",
"read-package-json-fast": "^2.0.1",
"rimraf": "^3.0.2",
diff --git a/package.json b/package.json
index 508f615847123..2486a1aadf87e 100644
--- a/package.json
+++ b/package.json
@@ -1,5 +1,5 @@
{
- "version": "8.3.2",
+ "version": "8.4.0",
"name": "npm",
"description": "a package manager for JavaScript",
"workspaces": [
@@ -55,7 +55,7 @@
},
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
- "@npmcli/arborist": "^4.2.1",
+ "@npmcli/arborist": "^4.3.0",
"@npmcli/ci-detect": "^1.4.0",
"@npmcli/config": "^2.4.0",
"@npmcli/map-workspaces": "^2.0.0",
@@ -79,18 +79,18 @@
"init-package-json": "^2.0.5",
"is-cidr": "^4.0.2",
"json-parse-even-better-errors": "^2.3.1",
- "libnpmaccess": "^5.0.0",
+ "libnpmaccess": "^5.0.1",
"libnpmdiff": "^3.0.0",
- "libnpmexec": "^3.0.2",
+ "libnpmexec": "^3.0.3",
"libnpmfund": "^2.0.2",
- "libnpmhook": "^7.0.0",
- "libnpmorg": "^3.0.0",
+ "libnpmhook": "^7.0.1",
+ "libnpmorg": "^3.0.1",
"libnpmpack": "^3.0.1",
- "libnpmpublish": "^5.0.0",
- "libnpmsearch": "^4.0.0",
- "libnpmteam": "^3.0.0",
+ "libnpmpublish": "^5.0.1",
+ "libnpmsearch": "^4.0.1",
+ "libnpmteam": "^3.0.1",
"libnpmversion": "^2.0.2",
- "make-fetch-happen": "^9.1.0",
+ "make-fetch-happen": "^10.0.0",
"minipass": "^3.1.6",
"minipass-pipeline": "^1.2.4",
"mkdirp": "^1.0.4",
@@ -102,12 +102,12 @@
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.1",
- "npm-profile": "^5.0.3",
- "npm-registry-fetch": "^11.0.0",
+ "npm-profile": "^6.0.0",
+ "npm-registry-fetch": "^12.0.1",
"npm-user-validate": "^1.0.1",
"npmlog": "^6.0.0",
"opener": "^1.5.2",
- "pacote": "^12.0.2",
+ "pacote": "^12.0.3",
"parse-conflict-json": "^2.0.1",
"proc-log": "^1.0.0",
"qrcode-terminal": "^0.12.0",
diff --git a/workspaces/arborist/lib/arborist/build-ideal-tree.js b/workspaces/arborist/lib/arborist/build-ideal-tree.js
index f20a554bd5ee8..0375e1851495a 100644
--- a/workspaces/arborist/lib/arborist/build-ideal-tree.js
+++ b/workspaces/arborist/lib/arborist/build-ideal-tree.js
@@ -269,6 +269,22 @@ module.exports = cls => class IdealTreeBuilder extends cls {
this[_complete] = !!options.complete
this[_preferDedupe] = !!options.preferDedupe
this[_legacyBundling] = !!options.legacyBundling
+
+ // validate the list of update names: they must be bare dep names,
+ // since semver ranges are not supported
+ for (const name of update.names) {
+ const spec = npa(name)
+ const validationError =
+ new TypeError(`Update arguments must not contain package version specifiers
+
+Try using the package name instead, e.g.:
+ npm update ${spec.name}`)
+ validationError.code = 'EUPDATEARGS'
+
+ if (spec.fetchSpec !== 'latest') {
+ throw validationError
+ }
+ }
this[_updateNames] = update.names
this[_updateAll] = update.all
@@ -320,7 +336,7 @@ module.exports = cls => class IdealTreeBuilder extends cls {
// Load on a new Arborist object, so the Nodes aren't the same,
// or else it'll get super confusing when we change them!
.then(async root => {
- if (!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) {
+ if ((!this[_updateAll] && !this[_global] && !root.meta.loadedFromDisk) || (this[_global] && this[_updateNames].length)) {
await new this.constructor(this.options).loadActual({ root })
const tree = root.target
// even though we didn't load it from a package-lock.json FILE,
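The check above hinges on how npm-package-arg parses an update argument: a bare package name resolves to the implicit 'latest' dist-tag, so any other fetchSpec means the caller supplied a version or range. A minimal sketch of that behaviour outside Arborist (the argument list here is purely illustrative):

    const npa = require('npm-package-arg')

    for (const arg of ['once', 'once@^1.0.0', 'abbrev@1.0.4']) {
      const spec = npa(arg)
      // a bare name parses as the default 'latest' dist-tag; anything else
      // carries a version specifier and would be rejected with EUPDATEARGS
      console.log(arg, '->', spec.fetchSpec === 'latest' ? 'accepted' : 'rejected')
    }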
diff --git a/workspaces/arborist/lib/arborist/load-actual.js b/workspaces/arborist/lib/arborist/load-actual.js
index 0d260858d81c6..c06ed80265e02 100644
--- a/workspaces/arborist/lib/arborist/load-actual.js
+++ b/workspaces/arborist/lib/arborist/load-actual.js
@@ -212,7 +212,8 @@ module.exports = cls => class ActualLoader extends cls {
const promises = []
for (const path of tree.workspaces.values()) {
if (!this[_cache].has(path)) {
- const p = this[_loadFSNode]({ path, root: this[_actualTree] })
+ // workspace overrides use the root overrides
+ const p = this[_loadFSNode]({ path, root: this[_actualTree], useRootOverrides: true })
.then(node => this[_loadFSTree](node))
promises.push(p)
}
@@ -240,7 +241,7 @@ module.exports = cls => class ActualLoader extends cls {
this[_actualTree] = root
}
- [_loadFSNode] ({ path, parent, real, root, loadOverrides }) {
+ [_loadFSNode] ({ path, parent, real, root, loadOverrides, useRootOverrides }) {
if (!real) {
return realpath(path, this[_rpcache], this[_stcache])
.then(
@@ -250,6 +251,7 @@ module.exports = cls => class ActualLoader extends cls {
real,
root,
loadOverrides,
+ useRootOverrides,
}),
// if realpath fails, just provide a dummy error node
error => new Node({
@@ -289,6 +291,9 @@ module.exports = cls => class ActualLoader extends cls {
parent,
root,
loadOverrides,
+ ...(useRootOverrides && root.overrides
+ ? { overrides: root.overrides.getNodeRule({ name: pkg.name, version: pkg.version }) }
+ : {}),
})
})
.then(node => {
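The useRootOverrides flag makes workspace packages loaded from disk pick up the root project's overrides via root.overrides.getNodeRule(). The observable effect, sketched with Arborist's public API and a hypothetical project layout matching the new load-actual test below:

    const Arborist = require('@npmcli/arborist')

    // assumes a root package.json with both an "overrides" block and a
    // "workspaces" list containing a workspace named 'foo' (hypothetical path)
    const arb = new Arborist({ path: '/path/to/project' })
    arb.loadActual().then(tree => {
      const fooEdge = tree.edgesOut.get('foo')
      // the workspace edge now shares the root's override set, so a root-level
      // override such as "bar": "2.0.0" also applies inside the workspace
      console.log(fooEdge.overrides === tree.overrides) // expected: true
    })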
diff --git a/workspaces/arborist/lib/arborist/reify.js b/workspaces/arborist/lib/arborist/reify.js
index d5e70323830b6..45ef93985358b 100644
--- a/workspaces/arborist/lib/arborist/reify.js
+++ b/workspaces/arborist/lib/arborist/reify.js
@@ -5,6 +5,7 @@ const pacote = require('pacote')
const AuditReport = require('../audit-report.js')
const { subset, intersects } = require('semver')
const npa = require('npm-package-arg')
+const semver = require('semver')
const debug = require('../debug.js')
const walkUp = require('walk-up-path')
@@ -1273,6 +1274,21 @@ module.exports = cls => class Reifier extends cls {
}
}
+ // Returns false if any of the edges into this node pins an exact
+ // version, i.e. a range that matches only the installed version
+ // e.g.: for an installed version 1.0.0, a range of =1.0.0 or 1.0.0
+ // returns false; any wider range returns true
+ const exactVersion = node => {
+ for (const edge of node.edgesIn) {
+ try {
+ if (semver.subset(edge.spec, node.version)) {
+ return false
+ }
+ } catch {
+ // specs that are not valid semver ranges (git, file, etc.) can't be
+ // compared, so they never block the update
+ }
+ }
+ return true
+ }
+
// helper that retrieves an array of nodes that were
// potentially updated during the reify process, in order
// to limit the number of nodes to check and update, only
@@ -1284,6 +1300,8 @@ module.exports = cls => class Reifier extends cls {
const filterDirectDependencies = node =>
!node.isRoot && node.resolveParent.isRoot
&& (!names || names.includes(node.name))
+ && exactVersion(node) // skip update for exact ranges
+
const directDeps = this.idealTree.inventory
.filter(filterDirectDependencies)
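The exact-range detection relies on semver.subset(): a plain version string, read as a range, matches only that version, so it is a subset of the installed version precisely when the dependent pinned it. A small sketch of the comparisons involved (versions are illustrative):

    const semver = require('semver')

    // an exact pin is a subset of the installed version, so the dep is skipped
    console.log(semver.subset('1.0.4', '1.0.4'))  // true
    console.log(semver.subset('=1.0.4', '1.0.4')) // true

    // a wider range is not, so the dep remains eligible for update
    console.log(semver.subset('^1.0.0', '1.0.4')) // false
    console.log(semver.subset('~1.0.2', '1.0.4')) // false

    // non-semver specs (git URLs, file paths, etc.) make subset() throw,
    // which is why the loop above wraps the call in try/catch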
diff --git a/workspaces/arborist/lib/shrinkwrap.js b/workspaces/arborist/lib/shrinkwrap.js
index b45fea0ac6111..bb6971f7ad57a 100644
--- a/workspaces/arborist/lib/shrinkwrap.js
+++ b/workspaces/arborist/lib/shrinkwrap.js
@@ -476,8 +476,13 @@ class Shrinkwrap {
// all good! hidden lockfile is the newest thing in here.
return data
}).catch(er => {
- const rel = relpath(this.path, this.filename)
- this.log.verbose('shrinkwrap', `failed to load ${rel}`, er)
+ /* istanbul ignore else */
+ if (typeof this.filename === 'string') {
+ const rel = relpath(this.path, this.filename)
+ this.log.verbose('shrinkwrap', `failed to load ${rel}`, er)
+ } else {
+ this.log.verbose('shrinkwrap', `failed to load ${this.path}`, er)
+ }
this.loadingError = er
this.loadedFromDisk = false
this.ancientLockfile = false
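The guard matters because a load can fail before any lockfile name has been resolved, for example when the project directory itself is unreadable; this.filename is then still undefined, and building a relative path from it would throw and mask the original error. A minimal sketch of that failure mode, assuming relpath ultimately delegates to Node's path.relative (an assumption about the helper, not verified here):

    const { relative } = require('path')

    const dir = '/some/project' // hypothetical path
    let filename                // never assigned: loading failed before resolving it

    try {
      relative(dir, filename)   // path.relative requires two strings
    } catch (er) {
      console.log(er.code)      // 'ERR_INVALID_ARG_TYPE' instead of the real EACCES
    }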
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 493a0a78c5c46..5c33f71678a70 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -1,6 +1,6 @@
{
"name": "@npmcli/arborist",
- "version": "4.2.1",
+ "version": "4.3.0",
"description": "Manage node_modules trees",
"dependencies": {
"@isaacs/string-locale-compare": "^1.1.0",
@@ -22,7 +22,7 @@
"npm-install-checks": "^4.0.0",
"npm-package-arg": "^8.1.5",
"npm-pick-manifest": "^6.1.0",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"pacote": "^12.0.2",
"parse-conflict-json": "^2.0.1",
"proc-log": "^1.0.0",
diff --git a/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs b/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs
index 93ea45862e8f8..b743dab958ffe 100644
--- a/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs
+++ b/workspaces/arborist/tap-snapshots/test/arborist/build-ideal-tree.js.test.cjs
@@ -121522,6 +121522,66 @@ ArboristNode {
exports[`test/arborist/build-ideal-tree.js TAP update global > update a single dep 1`] = `
ArboristNode {
"children": Map {
+ "@isaacs/testing-dev-optional-flags" => ArboristNode {
+ "children": Map {
+ "own-or" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "own-or",
+ "spec": "^1.0.0",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "name": "own-or",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "version": "1.0.0",
+ },
+ "wrappy" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "error": "INVALID",
+ "from": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "wrappy",
+ "spec": "^1.0.2",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "name": "wrappy",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "version": "1.0.0",
+ },
+ },
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "",
+ "name": "@isaacs/testing-dev-optional-flags",
+ "spec": "*",
+ "type": "prod",
+ },
+ },
+ "edgesOut": Map {
+ "own-or" => EdgeOut {
+ "name": "own-or",
+ "spec": "^1.0.0",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "type": "prod",
+ },
+ "wrappy" => EdgeOut {
+ "error": "INVALID",
+ "name": "wrappy",
+ "spec": "^1.0.2",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "@isaacs/testing-dev-optional-flags",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags",
+ "version": "1.0.0",
+ },
"once" => ArboristNode {
"children": Map {
"wrappy" => ArboristNode {
@@ -121536,8 +121596,7 @@ ArboristNode {
"location": "node_modules/once/node_modules/wrappy",
"name": "wrappy",
"path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/once/node_modules/wrappy",
- "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
- "version": "1.0.2",
+ "version": "1.0.1",
},
},
"edgesIn": Set {
@@ -121564,6 +121623,12 @@ ArboristNode {
},
},
"edgesOut": Map {
+ "@isaacs/testing-dev-optional-flags" => EdgeOut {
+ "name": "@isaacs/testing-dev-optional-flags",
+ "spec": "*",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags",
+ "type": "prod",
+ },
"once" => EdgeOut {
"name": "once",
"spec": "*",
@@ -121706,8 +121771,245 @@ ArboristNode {
}
`
+exports[`test/arborist/build-ideal-tree.js TAP update global > updating missing dep should have no effect 1`] = `
+ArboristNode {
+ "children": Map {
+ "@isaacs/testing-dev-optional-flags" => ArboristNode {
+ "children": Map {
+ "own-or" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "own-or",
+ "spec": "^1.0.0",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "name": "own-or",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "version": "1.0.0",
+ },
+ "wrappy" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "error": "INVALID",
+ "from": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "wrappy",
+ "spec": "^1.0.2",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "name": "wrappy",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "version": "1.0.0",
+ },
+ },
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "",
+ "name": "@isaacs/testing-dev-optional-flags",
+ "spec": "*",
+ "type": "prod",
+ },
+ },
+ "edgesOut": Map {
+ "own-or" => EdgeOut {
+ "name": "own-or",
+ "spec": "^1.0.0",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "type": "prod",
+ },
+ "wrappy" => EdgeOut {
+ "error": "INVALID",
+ "name": "wrappy",
+ "spec": "^1.0.2",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "@isaacs/testing-dev-optional-flags",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags",
+ "version": "1.0.0",
+ },
+ "once" => ArboristNode {
+ "children": Map {
+ "wrappy" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "node_modules/once",
+ "name": "wrappy",
+ "spec": "1",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/once/node_modules/wrappy",
+ "name": "wrappy",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/once/node_modules/wrappy",
+ "version": "1.0.1",
+ },
+ },
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "",
+ "name": "once",
+ "spec": "*",
+ "type": "prod",
+ },
+ },
+ "edgesOut": Map {
+ "wrappy" => EdgeOut {
+ "name": "wrappy",
+ "spec": "1",
+ "to": "node_modules/once/node_modules/wrappy",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/once",
+ "name": "once",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/once",
+ "version": "1.3.1",
+ },
+ },
+ "edgesOut": Map {
+ "@isaacs/testing-dev-optional-flags" => EdgeOut {
+ "name": "@isaacs/testing-dev-optional-flags",
+ "spec": "*",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags",
+ "type": "prod",
+ },
+ "once" => EdgeOut {
+ "name": "once",
+ "spec": "*",
+ "to": "node_modules/once",
+ "type": "prod",
+ },
+ },
+ "isProjectRoot": true,
+ "location": "",
+ "name": "tap-testdir-build-ideal-tree-update-global",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global",
+}
+`
+
exports[`test/arborist/build-ideal-tree.js TAP update global > updating sub-dep has no effect 1`] = `
ArboristNode {
+ "children": Map {
+ "@isaacs/testing-dev-optional-flags" => ArboristNode {
+ "children": Map {
+ "own-or" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "own-or",
+ "spec": "^1.0.0",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "name": "own-or",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "version": "1.0.0",
+ },
+ "wrappy" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "error": "INVALID",
+ "from": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "wrappy",
+ "spec": "^1.0.2",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "name": "wrappy",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "version": "1.0.0",
+ },
+ },
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "",
+ "name": "@isaacs/testing-dev-optional-flags",
+ "spec": "*",
+ "type": "prod",
+ },
+ },
+ "edgesOut": Map {
+ "own-or" => EdgeOut {
+ "name": "own-or",
+ "spec": "^1.0.0",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/own-or",
+ "type": "prod",
+ },
+ "wrappy" => EdgeOut {
+ "error": "INVALID",
+ "name": "wrappy",
+ "spec": "^1.0.2",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags/node_modules/wrappy",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/@isaacs/testing-dev-optional-flags",
+ "name": "@isaacs/testing-dev-optional-flags",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/@isaacs/testing-dev-optional-flags",
+ "version": "1.0.0",
+ },
+ "once" => ArboristNode {
+ "children": Map {
+ "wrappy" => ArboristNode {
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "node_modules/once",
+ "name": "wrappy",
+ "spec": "1",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/once/node_modules/wrappy",
+ "name": "wrappy",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/once/node_modules/wrappy",
+ "version": "1.0.1",
+ },
+ },
+ "edgesIn": Set {
+ EdgeIn {
+ "from": "",
+ "name": "once",
+ "spec": "*",
+ "type": "prod",
+ },
+ },
+ "edgesOut": Map {
+ "wrappy" => EdgeOut {
+ "name": "wrappy",
+ "spec": "1",
+ "to": "node_modules/once/node_modules/wrappy",
+ "type": "prod",
+ },
+ },
+ "location": "node_modules/once",
+ "name": "once",
+ "path": "{CWD}/test/arborist/tap-testdir-build-ideal-tree-update-global/node_modules/once",
+ "version": "1.3.1",
+ },
+ },
+ "edgesOut": Map {
+ "@isaacs/testing-dev-optional-flags" => EdgeOut {
+ "name": "@isaacs/testing-dev-optional-flags",
+ "spec": "*",
+ "to": "node_modules/@isaacs/testing-dev-optional-flags",
+ "type": "prod",
+ },
+ "once" => EdgeOut {
+ "name": "once",
+ "spec": "*",
+ "to": "node_modules/once",
+ "type": "prod",
+ },
+ },
"isProjectRoot": true,
"location": "",
"name": "tap-testdir-build-ideal-tree-update-global",
diff --git a/workspaces/arborist/test/arborist/build-ideal-tree.js b/workspaces/arborist/test/arborist/build-ideal-tree.js
index 2c058a6a3283e..368df05bcfaf4 100644
--- a/workspaces/arborist/test/arborist/build-ideal-tree.js
+++ b/workspaces/arborist/test/arborist/build-ideal-tree.js
@@ -2092,8 +2092,28 @@ t.test('update global', async t => {
},
},
})
+
+ t.matchSnapshot(await printIdeal(path, { global: true, update: ['abbrev'] }),
+ 'updating missing dep should have no effect')
+
t.matchSnapshot(await printIdeal(path, { global: true, update: ['wrappy'] }),
'updating sub-dep has no effect')
+
+ const invalidArgs = [
+ 'once@1.4.0',
+ 'once@next',
+ 'once@^1.0.0',
+ 'once@>=2.0.0',
+ 'once@2',
+ ]
+ for (const updateName of invalidArgs) {
+ t.rejects(
+ printIdeal(path, { global: true, update: [updateName] }),
+ { code: 'EUPDATEARGS' },
+ 'should throw an error when using semver ranges'
+ )
+ }
+
t.matchSnapshot(await printIdeal(path, { global: true, update: ['once'] }),
'update a single dep')
t.matchSnapshot(await printIdeal(path, { global: true, update: true }),
@@ -3724,6 +3744,50 @@ t.test('overrides', t => {
t.equal(bcEdge.to.version, '2.0.0', 'b->c is 2.0.0')
})
+ t.test('overrides a workspace dependency', async (t) => {
+ generateNocks(t, {
+ bar: {
+ versions: ['1.0.0', '1.0.1', '2.0.0'],
+ },
+ })
+
+ const path = t.testdir({
+ 'package.json': JSON.stringify({
+ name: 'root',
+ dependencies: {
+ foo: '1.0.1',
+ },
+ overrides: {
+ bar: '2.0.0',
+ },
+ workspaces: [
+ './workspaces/*',
+ ],
+ }),
+ workspaces: {
+ foo: {
+ 'package.json': JSON.stringify({
+ name: 'foo',
+ version: '1.0.1',
+ dependencies: {
+ bar: '1.0.0',
+ },
+ }),
+ },
+ },
+ })
+
+ const tree = await buildIdeal(path)
+
+ const fooEdge = tree.edgesOut.get('foo')
+ t.equal(fooEdge.valid, true)
+
+ // fooEdge.to is a link, so we need to look at the target for edgesOut
+ const fooBarEdge = fooEdge.to.target.edgesOut.get('bar')
+ t.equal(fooBarEdge.valid, true)
+ t.equal(fooBarEdge.to.version, '2.0.0')
+ })
+
t.end()
})
diff --git a/workspaces/arborist/test/arborist/load-actual.js b/workspaces/arborist/test/arborist/load-actual.js
index 0da044c5aee4a..464bdc77ba058 100644
--- a/workspaces/arborist/test/arborist/load-actual.js
+++ b/workspaces/arborist/test/arborist/load-actual.js
@@ -422,3 +422,33 @@ t.test('load global space with link deps', async t => {
},
})
})
+
+t.test('loading a workspace maintains overrides', async t => {
+ const path = t.testdir({
+ 'package.json': JSON.stringify({
+ name: 'root',
+ version: '1.0.0',
+ dependencies: {
+ foo: '1.0.0',
+ },
+ overrides: {
+ bar: '2.0.0',
+ },
+ workspaces: ['./foo'],
+ }),
+ foo: {
+ 'package.json': JSON.stringify({
+ name: 'foo',
+ version: '1.0.0',
+ dependencies: {
+ bar: '1.0.0',
+ },
+ }),
+ },
+ })
+
+ const tree = await loadActual(path)
+
+ const fooEdge = tree.edgesOut.get('foo')
+ t.equal(tree.overrides, fooEdge.overrides, 'foo edge got the correct overrides')
+})
diff --git a/workspaces/arborist/test/arborist/reify.js b/workspaces/arborist/test/arborist/reify.js
index d5fc166a5636d..caa15f59f2476 100644
--- a/workspaces/arborist/test/arborist/reify.js
+++ b/workspaces/arborist/test/arborist/reify.js
@@ -2572,5 +2572,34 @@ t.test('save package.json on update', t => {
)
})
+ t.test('should preserve exact ranges', async t => {
+ const path = fixture(t, 'update-exact-version')
+
+ await reify(path, { update: true, save: true })
+
+ t.equal(
+ require(resolve(path, 'package.json')).dependencies.abbrev,
+ '1.0.4',
+ 'should save no top level dep update to root package.json'
+ )
+ })
+
+ t.test('should preserve exact ranges, missing actual tree', async t => {
+ const path = t.testdir({
+ 'package.json': JSON.stringify({
+ dependencies: {
+ abbrev: '1.0.4',
+ },
+ }),
+ })
+
+ await reify(path, { update: true, save: true })
+
+ t.equal(
+ require(resolve(path, 'package.json')).dependencies.abbrev,
+ '1.0.4',
+ 'should save no top level dep update to root package.json'
+ )
+ })
t.end()
})
diff --git a/workspaces/arborist/test/fixtures/reify-cases/update-exact-version.js b/workspaces/arborist/test/fixtures/reify-cases/update-exact-version.js
new file mode 100644
index 0000000000000..d766d3bc915ff
--- /dev/null
+++ b/workspaces/arborist/test/fixtures/reify-cases/update-exact-version.js
@@ -0,0 +1,54 @@
+// generated from test/fixtures/update-exact-version
+module.exports = t => {
+ const path = t.testdir({
+ "node_modules": {
+ "abbrev": {
+ "package.json": JSON.stringify({
+ "name": "abbrev",
+ "version": "1.0.4",
+ "description": "Like ruby's abbrev module, but in js",
+ "author": "Isaac Z. Schlueter ",
+ "main": "./lib/abbrev.js",
+ "scripts": {
+ "test": "node lib/abbrev.js"
+ },
+ "repository": "http://github.com/isaacs/abbrev-js",
+ "license": {
+ "type": "MIT",
+ "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE"
+ }
+ })
+ }
+ },
+ "package-lock.json": JSON.stringify({
+ "name": "update-exact-version",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "dependencies": {
+ "abbrev": "1.0.4"
+ }
+ },
+ "node_modules/abbrev": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz",
+ "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0="
+ }
+ },
+ "dependencies": {
+ "abbrev": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz",
+ "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0="
+ }
+ }
+ }),
+ "package.json": JSON.stringify({
+ "dependencies": {
+ "abbrev": "1.0.4"
+ }
+ })
+})
+ return path
+}
diff --git a/workspaces/arborist/test/fixtures/update-exact-version/node_modules/abbrev/package.json b/workspaces/arborist/test/fixtures/update-exact-version/node_modules/abbrev/package.json
new file mode 100644
index 0000000000000..72042a5b907f4
--- /dev/null
+++ b/workspaces/arborist/test/fixtures/update-exact-version/node_modules/abbrev/package.json
@@ -0,0 +1,15 @@
+{
+ "name": "abbrev",
+ "version": "1.0.4",
+ "description": "Like ruby's abbrev module, but in js",
+ "author": "Isaac Z. Schlueter ",
+ "main": "./lib/abbrev.js",
+ "scripts": {
+ "test": "node lib/abbrev.js"
+ },
+ "repository": "http://github.com/isaacs/abbrev-js",
+ "license": {
+ "type": "MIT",
+ "url": "https://github.com/isaacs/abbrev-js/raw/master/LICENSE"
+ }
+}
diff --git a/workspaces/arborist/test/fixtures/update-exact-version/package-lock.json b/workspaces/arborist/test/fixtures/update-exact-version/package-lock.json
new file mode 100644
index 0000000000000..0d7b5f6472ebf
--- /dev/null
+++ b/workspaces/arborist/test/fixtures/update-exact-version/package-lock.json
@@ -0,0 +1,24 @@
+{
+ "name": "update-exact-version",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "dependencies": {
+ "abbrev": "1.0.4"
+ }
+ },
+ "node_modules/abbrev": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz",
+ "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0="
+ }
+ },
+ "dependencies": {
+ "abbrev": {
+ "version": "1.0.4",
+ "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.0.4.tgz",
+ "integrity": "sha1-vVWuXkE7oXIu5Mq6H26hBBSlns0="
+ }
+ }
+}
diff --git a/workspaces/arborist/test/fixtures/update-exact-version/package.json b/workspaces/arborist/test/fixtures/update-exact-version/package.json
new file mode 100644
index 0000000000000..4fa41479389d6
--- /dev/null
+++ b/workspaces/arborist/test/fixtures/update-exact-version/package.json
@@ -0,0 +1,5 @@
+{
+ "dependencies": {
+ "abbrev": "1.0.4"
+ }
+}
diff --git a/workspaces/arborist/test/shrinkwrap.js b/workspaces/arborist/test/shrinkwrap.js
index f752c724a35e7..d47266d30e1c1 100644
--- a/workspaces/arborist/test/shrinkwrap.js
+++ b/workspaces/arborist/test/shrinkwrap.js
@@ -1597,4 +1597,19 @@ t.test('setting lockfileVersion from the file contents', async t => {
})
t.equal(Shrinkwrap.defaultLockfileVersion, 2, 'default is 2')
+
+ t.test('load should return error correctly when it cannot access the folder',
+ { skip: process.platform === 'win32' ? 'skip chmod in windows' : false },
+ async t => {
+ const dir = t.testdir({})
+ try {
+ fs.chmodSync(dir, '000')
+ const res = await Shrinkwrap.load({ path: dir })
+ t.ok(res.loadingError, 'loading error should exist')
+ t.strictSame(res.loadingError.errno, -13)
+ t.strictSame(res.loadingError.code, 'EACCES')
+ } finally {
+ fs.chmodSync(dir, '666')
+ }
+ })
})
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index 8d2ba3ad765fd..760da6cc1be87 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmaccess",
- "version": "5.0.0",
+ "version": "5.0.1",
"description": "programmatic library for `npm access` commands",
"author": "GitHub Inc.",
"license": "ISC",
@@ -32,7 +32,7 @@
"aproba": "^2.0.0",
"minipass": "^3.1.1",
"npm-package-arg": "^8.1.2",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"engines": {
"node": "^12.13.0 || ^14.15.0 || >=16"
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index ff728b5473bc9..1de0cdfe26a95 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmexec",
- "version": "3.0.2",
+ "version": "3.0.3",
"files": [
"bin",
"lib"
diff --git a/workspaces/libnpmhook/package.json b/workspaces/libnpmhook/package.json
index a46de40ac9828..4f30555273205 100644
--- a/workspaces/libnpmhook/package.json
+++ b/workspaces/libnpmhook/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmhook",
- "version": "7.0.0",
+ "version": "7.0.1",
"description": "programmatic API for managing npm registry hooks",
"main": "lib/index.js",
"files": [
@@ -34,7 +34,7 @@
"license": "ISC",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"devDependencies": {
"@npmcli/template-oss": "^2.4.2",
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index 93297c36338d2..5c4909b1c9505 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmorg",
- "version": "3.0.0",
+ "version": "3.0.1",
"description": "Programmatic api for `npm org` commands",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -45,7 +45,7 @@
"homepage": "https://npmjs.com/package/libnpmorg",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"engines": {
"node": "^12.13.0 || ^14.15.0 || >=16"
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 156503af7d3dd..3fd2d6d5a3961 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmpublish",
- "version": "5.0.0",
+ "version": "5.0.1",
"description": "Programmatic API for the bits behind npm publish and unpublish",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -37,14 +37,14 @@
},
"repository": {
"type": "git",
- "url": "https://github.com/npm/libnpmpublish.git"
+ "url": "https://github.com/npm/cli.git"
},
- "bugs": "https://github.com/npm/libnpmpublish/issues",
+ "bugs": "https://github.com/npm/cli/issues",
"homepage": "https://npmjs.com/package/libnpmpublish",
"dependencies": {
"normalize-package-data": "^3.0.2",
"npm-package-arg": "^8.1.2",
- "npm-registry-fetch": "^11.0.0",
+ "npm-registry-fetch": "^12.0.1",
"semver": "^7.1.3",
"ssri": "^8.0.1"
},
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index 5479e41ae3c3d..f524426dc65f8 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -1,6 +1,6 @@
{
"name": "libnpmsearch",
- "version": "4.0.0",
+ "version": "4.0.1",
"description": "Programmatic API for searching in npm and compatible registries.",
"author": "GitHub Inc.",
"main": "lib/index.js",
@@ -41,7 +41,7 @@
"bugs": "https://github.com/npm/libnpmsearch/issues",
"homepage": "https://npmjs.com/package/libnpmsearch",
"dependencies": {
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"engines": {
"node": "^12.13.0 || ^14.15.0 || >=16"
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index 1264402321ee7..23903551aa8c1 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -1,7 +1,7 @@
{
"name": "libnpmteam",
"description": "npm Team management APIs",
- "version": "3.0.0",
+ "version": "3.0.1",
"author": "GitHub Inc.",
"license": "ISC",
"main": "lib/index.js",
@@ -32,7 +32,7 @@
"homepage": "https://npmjs.com/package/libnpmteam",
"dependencies": {
"aproba": "^2.0.0",
- "npm-registry-fetch": "^11.0.0"
+ "npm-registry-fetch": "^12.0.1"
},
"engines": {
"node": "^12.13.0 || ^14.15.0 || >=16"