diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml
deleted file mode 100644
index 683c40a9f98..00000000000
--- a/.buildkite/pipeline.yml
+++ /dev/null
@@ -1,488 +0,0 @@
-env:
- ANKA_MOJAVE_TEMPLATE: "10.14.4_6C_14G_40G"
- ANKA_TEMPLATE_TAG: "clean::cicd::git-ssh::nas::brew::buildkite-agent-3.11.2"
- CHECKSUMABLE: "scripts/eosio_build*"
- MAC_TAG: "eosio_2-5"
- WORKDIR: "/data/job"
-
-steps:
- - wait
-
- - label: ":darwin: Ensure Mojave Anka Template Dependency Tag/Layer Exists"
- command:
- - "git clone git@github.com:EOSIO/mac-anka-fleet.git"
- - "cd mac-anka-fleet && . ./ensure_tag.bash -u 12 -r 25G -a '-n'"
- agents:
- - "queue=mac-anka-templater-fleet"
- env:
- REPO: "${BUILDKITE_PULL_REQUEST_REPO:-$BUILDKITE_REPO}"
- REPO_COMMIT: "${BUILDKITE_COMMIT}"
- CHECKSUMABLE: "${CHECKSUMABLE}"
- TEMPLATE: "${ANKA_MOJAVE_TEMPLATE}"
- TEMPLATE_TAG: "${ANKA_TEMPLATE_TAG}"
- TAG_COMMANDS: "git clone ${BUILDKITE_PULL_REQUEST_REPO:-$BUILDKITE_REPO} && cd eos && git checkout ${BUILDKITE_COMMIT} && git submodule update --init --recursive && ./scripts/eosio_build.sh -y -P -f -m && cd .. && rm -rf eos" # CLONED_REPO_DIR IS REQUIRED and is where the repo is always cloned into
- PROJECT_TAG: "${MAC_TAG}"
- timeout: 320
- skip: $SKIP_MOJAVE
-
- - wait
-
- - label: ":darwin: [Darwin] Mojave Build"
- command: |
- git clone ${BUILDKITE_PULL_REQUEST_REPO:-$BUILDKITE_REPO} .
- git checkout ${BUILDKITE_COMMIT}
- git submodule update --init --recursive
- ./scripts/eosio_build.sh -y -P -m
- tar -pczf build.tar.gz build
- [[ -f build.tar.gz ]] && buildkite-agent artifact upload build.tar.gz || (echo '+++ :no_entry: ERROR: No build.tar.gz artifact found, or artifact upload failed!'; echo '$$ ls -la'; ls -la; exit 1)
- plugins:
- chef/anka#v0.4.4:
- no-volume: true
- workdir: $WORKDIR
- inherit-environment-vars: true
- vm-name: $ANKA_MOJAVE_TEMPLATE
- vm-registry-tag: "${ANKA_TEMPLATE_TAG}::${MAC_TAG}"
- modify-cpu: 12
- modify-ram: 24
- always-pull: true
- debug: true
- wait-network: true
- agents:
- - "queue=mac-anka-large-node-fleet"
- timeout: 120
- skip: $SKIP_MOJAVE
-
- - label: ":aws: Amazon Linux 2 Build"
- command: |
- ./scripts/eosio_build.sh -y -P -m
- tar -pczf build.tar.gz build
- [[ ! -f build.tar.gz ]] && (echo '+++ :no_entry: ERROR: No build.tar.gz artifact found!'; echo '$$ ls -la'; ls -la; exit 1) || :
- agents:
- queue: "automation-large-builder-fleet"
- artifact_paths: "build.tar.gz"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.1.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:amazonlinux2_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 120
-
- - label: ":centos: CentOS 7 Build"
- command: |
- ./scripts/eosio_build.sh -y -P -m
- tar -pczf build.tar.gz build
- [[ ! -f build.tar.gz ]] && (echo '+++ :no_entry: ERROR: No build.tar.gz artifact found!'; echo '$$ ls -la'; ls -la; exit 1) || :
- agents:
- queue: "automation-large-builder-fleet"
- artifact_paths: "build.tar.gz"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:centos7_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 120
-
- - label: ":ubuntu: Ubuntu 16.04 Build"
- command: |
- ./scripts/eosio_build.sh -y -P -m
- tar -pczf build.tar.gz build
- [[ ! -f build.tar.gz ]] && (echo '+++ :no_entry: ERROR: No build.tar.gz artifact found!'; echo '$$ ls -la'; ls -la; exit 1) || :
- agents:
- queue: "automation-large-builder-fleet"
- artifact_paths: "build.tar.gz"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu16_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 120
-
- - label: ":ubuntu: Ubuntu 18.04 Build"
- command: |
- ./scripts/eosio_build.sh -y -P -m
- tar -pczf build.tar.gz build
- [[ ! -f build.tar.gz ]] && (echo '+++ :no_entry: ERROR: No build.tar.gz artifact found!'; echo '$$ ls -la'; ls -la; exit 1) || :
- agents:
- queue: "automation-large-builder-fleet"
- artifact_paths: "build.tar.gz"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu18_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 120
-
- - wait
-
- # MacOS Mojave
- - label: ":darwin: [Darwin] Mojave Tests"
- command: |
- git clone ${BUILDKITE_PULL_REQUEST_REPO:-$BUILDKITE_REPO} .
- git checkout ${BUILDKITE_COMMIT}
- buildkite-agent artifact download build.tar.gz . --step ":darwin: [Darwin] Mojave Build"
- ./scripts/parallel-test.sh
- agents:
- - "queue=mac-anka-node-fleet"
- plugins:
- chef/anka#v0.4.4:
- no-volume: true
- workdir: $WORKDIR
- inherit-environment-vars: true
- bash-interactive: true
- vm-name: $ANKA_MOJAVE_TEMPLATE
- vm-registry-tag: "${ANKA_TEMPLATE_TAG}::${MAC_TAG}"
- always-pull: true
- debug: true
- wait-network: true
- timeout: 120
- skip: $SKIP_MOJAVE$SKIP_TESTS
-
- - label: ":darwin: [Darwin] Mojave NP Tests"
- command: |
- git clone ${BUILDKITE_PULL_REQUEST_REPO:-$BUILDKITE_REPO} .
- git checkout ${BUILDKITE_COMMIT}
- buildkite-agent artifact download build.tar.gz . --step ":darwin: [Darwin] Mojave Build"
- ./scripts/serial-test.sh
- agents:
- - "queue=mac-anka-node-fleet"
- plugins:
- chef/anka#v0.4.4:
- no-volume: true
- workdir: $WORKDIR
- inherit-environment-vars: true
- bash-interactive: true
- vm-name: $ANKA_MOJAVE_TEMPLATE
- vm-registry-tag: "${ANKA_TEMPLATE_TAG}::${MAC_TAG}"
- always-pull: true
- debug: true
- wait-network: true
- timeout: 120
- skip: $SKIP_MOJAVE
-
- # Amazon Linux 2
- - label: ":aws: Amazon Linux 2 Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":aws: Amazon Linux 2 Build"
- ./scripts/parallel-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:amazonlinux2_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
- skip: $SKIP_TESTS
-
- - label: ":aws: Amazon Linux 2 NP Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":aws: Amazon Linux 2 Build"
- ./scripts/serial-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:amazonlinux2_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
-
- # CentOS
- - label: ":centos: CentOS 7 Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":centos: CentOS 7 Build"
- ./scripts/parallel-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:centos7_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
- skip: $SKIP_TESTS
-
- - label: ":centos: CentOS 7 NP Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":centos: CentOS 7 Build"
- ./scripts/serial-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:centos7_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
-
- # Ubuntu 16.04
- - label: ":ubuntu: Ubuntu 16.04 Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":ubuntu: Ubuntu 16.04 Build"
- ./scripts/parallel-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu16_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
- skip: $SKIP_TESTS
-
- - label: ":ubuntu: Ubuntu 16.04 NP Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":ubuntu: Ubuntu 16.04 Build"
- ./scripts/serial-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu16_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
-
- # Ubuntu 18.04
- - label: ":ubuntu: Ubuntu 18.04 Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":ubuntu: Ubuntu 18.04 Build"
- ./scripts/parallel-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu18_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
- skip: $SKIP_TESTS
-
- - label: ":ubuntu: Ubuntu 18.04 NP Tests"
- command: |
- echo "--- :arrow_down: Downloading Build Directory"
- buildkite-agent artifact download build.tar.gz . --step ":ubuntu: Ubuntu 18.04 Build"
- ./scripts/serial-test.sh
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu18_2-5"
- propagate-environment: true
- workdir: /data/job
- timeout: 60
-
- - wait: # Test Metrics
- continue_on_failure: true
-
- - command: |
- cd scripts/metrics
- node --max-old-space-size=4096 test-metrics.js
- label: ":bar_chart: Test Metrics"
- agents:
- queue: "automation-apps-builder-fleet"
- timeout: 10
- soft_fail: true
-
- - wait
-
- - label: ":centos: CentOS 7 Package Builder"
- command: |
- ./scripts/package-builder.sh ':centos: CentOS 7 Build'
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:centos7_2-5"
- propagate-environment: true
- workdir: /data/job
- env:
- OS: "el7"
- PKGTYPE: "rpm"
- timeout: 60
-
- - command: | # Ubuntu 16.04 Package Builder
- ./scripts/package-builder.sh ':ubuntu: Ubuntu 16.04 Build'
- label: ":ubuntu: Ubuntu 16.04 Package Builder"
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu16_2-5"
- propagate-environment: true
- workdir: /data/job
- env:
- OS: "ubuntu-16.04"
- PKGTYPE: "deb"
- timeout: 60
-
- - command: | # Ubuntu 18.04 Package Builder
- ./scripts/package-builder.sh ':ubuntu: Ubuntu 18.04 Build'
- label: ":ubuntu: Ubuntu 18.04 Package Builder"
- agents:
- queue: "automation-large-builder-fleet"
- plugins:
- ecr#v1.1.4:
- login: true
- account_ids: "436617320021"
- no-include-email: true
- region: "us-west-2"
- docker#v3.2.0:
- shell: ["/bin/bash", "-i", "-e", "-c"]
- debug: true
- image: "436617320021.dkr.ecr.us-west-2.amazonaws.com/ci:ubuntu18_2-5"
- propagate-environment: true
- workdir: /data/job
- env:
- OS: "ubuntu-18.04"
- PKGTYPE: "deb"
- timeout: 60
-
- - label: ":darwin: Mojave Package Builder"
- command: |
- git clone ${BUILDKITE_PULL_REQUEST_REPO:-$BUILDKITE_REPO} .
- git checkout ${BUILDKITE_COMMIT}
- ./scripts/package-builder.sh ':darwin: [Darwin] Mojave Build'
- agents:
- - "queue=mac-anka-node-fleet"
- plugins:
- chef/anka#v0.4.4:
- no-volume: true
- workdir: $WORKDIR
- inherit-environment-vars: true
- bash-interactive: true
- vm-name: $ANKA_MOJAVE_TEMPLATE
- vm-registry-tag: "${ANKA_TEMPLATE_TAG}::${MAC_TAG}"
- always-pull: true
- debug: true
- wait-network: true
- timeout: 60
- skip: $SKIP_MOJAVE
-
- - wait
-
- - command: | # Brew Updater
- echo "+++ :arrow_down: Downloading brew files"
- buildkite-agent artifact download "eosio.rb" . --step ":darwin: Mojave Package Builder"
- label: ":darwin: Brew Updater"
- agents:
- queue: "automation-large-builder-fleet"
- artifact_paths:
- - "eosio.rb"
- timeout: 5
- skip: $SKIP_MOJAVE
-
- - command: | # Git Submodule Regression Check
- echo "+++ :git: Running Git Submodule Regression Check"
- ./scripts/submodule_check.sh
- label: "Git Submodule Regression Check"
- agents:
- queue: "automation-large-builder-fleet"
- timeout: 5
\ No newline at end of file
diff --git a/scripts/metrics/node_modules/node-fetch/CHANGELOG.md b/scripts/metrics/node_modules/node-fetch/CHANGELOG.md
deleted file mode 100644
index 941b6a8d8b7..00000000000
--- a/scripts/metrics/node_modules/node-fetch/CHANGELOG.md
+++ /dev/null
@@ -1,260 +0,0 @@
-
-Changelog
-=========
-
-
-# 2.x release
-
-## v2.5.0
-
-- Enhance: `Response` object now includes `redirected` property.
-- Enhance: `fetch()` now accepts third-party `Blob` implementation as body.
-- Other: disable `package-lock.json` generation as we never commit them.
-- Other: dev dependency update.
-- Other: readme update.
-
-## v2.4.1
-
-- Fix: `Blob` import rule for node < 10, as `Readable` isn't a named export.
-
-## v2.4.0
-
-- Enhance: added `Brotli` compression support (using node's zlib).
-- Enhance: updated `Blob` implementation per spec.
-- Fix: set content type automatically for `URLSearchParams`.
-- Fix: `Headers` now reject empty header names.
-- Fix: test cases, as node 12+ no longer accepts invalid header response.
-
-## v2.3.0
-
-- Enhance: added `AbortSignal` support, with README example.
-- Enhance: handle invalid `Location` header during redirect by rejecting them explicitly with `FetchError`.
-- Fix: update `browser.js` to support react-native environment, where `self` isn't available globally.
-
-## v2.2.1
-
-- Fix: `compress` flag shouldn't overwrite existing `Accept-Encoding` header.
-- Fix: multiple `import` rules, where `PassThrough` etc. doesn't have a named export when using node <10 and the `--experimental-modules` flag.
-- Other: Better README.
-
-## v2.2.0
-
-- Enhance: Support all `ArrayBuffer` view types
-- Enhance: Support Web Workers
-- Enhance: Support Node.js' `--experimental-modules` mode; deprecate `.es.js` file
-- Fix: Add `__esModule` property to the exports object
-- Other: Better example in README for writing response to a file
-- Other: More tests for Agent
-
-## v2.1.2
-
-- Fix: allow `Body` methods to work on `ArrayBuffer`-backed `Body` objects
-- Fix: reject promise returned by `Body` methods when the accumulated `Buffer` exceeds the maximum size
-- Fix: support custom `Host` headers with any casing
-- Fix: support importing `fetch()` from TypeScript in `browser.js`
-- Fix: handle the redirect response body properly
-
-## v2.1.1
-
-Fix packaging errors in v2.1.0.
-
-## v2.1.0
-
-- Enhance: allow using ArrayBuffer as the `body` of a `fetch()` or `Request`
-- Fix: store HTTP headers of a `Headers` object internally with the given case, for compatibility with older servers that incorrectly treated header names in a case-sensitive manner
-- Fix: silently ignore invalid HTTP headers
-- Fix: handle HTTP redirect responses without a `Location` header just like non-redirect responses
-- Fix: include bodies when following a redirection when appropriate
-
-## v2.0.0
-
-This is a major release. Check [our upgrade guide](https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md) for an overview on some key differences between v1 and v2.
-
-### General changes
-
-- Major: Node.js 0.10.x and 0.12.x support is dropped
-- Major: `require('node-fetch/lib/response')` etc. is now unsupported; use `require('node-fetch').Response` or ES6 module imports
-- Enhance: start testing on Node.js v4.x, v6.x, v8.x LTS, as well as v9.x stable
-- Enhance: use Rollup to produce a distributed bundle (less memory overhead and faster startup)
-- Enhance: make `Object.prototype.toString()` on Headers, Requests, and Responses return correct class strings
-- Other: rewrite in ES2015 using Babel
-- Other: use Codecov for code coverage tracking
-- Other: update package.json script for npm 5
-- Other: `encoding` module is now optional (alpha.7)
-- Other: expose browser.js through package.json, avoid bundling mishaps (alpha.9)
-- Other: allow TypeScript to `import` node-fetch by exposing default (alpha.9)
-
-### HTTP requests
-
-- Major: overwrite user's `Content-Length` if we can be sure our information is correct (per spec)
-- Fix: errors in a response are caught before the body is accessed
-- Fix: support WHATWG URL objects, created by `whatwg-url` package or `require('url').URL` in Node.js 7+
-
-### Response and Request classes
-
-- Major: `response.text()` no longer attempts to detect encoding, instead always opting for UTF-8 (per spec); use `response.textConverted()` for the v1 behavior
-- Major: make `response.json()` throw error instead of returning an empty object on 204 no-content response (per spec; reverts behavior changed in v1.6.2)
-- Major: internal methods are no longer exposed
-- Major: throw error when a `GET` or `HEAD` Request is constructed with a non-null body (per spec)
-- Enhance: add `response.arrayBuffer()` (also applies to Requests)
-- Enhance: add experimental `response.blob()` (also applies to Requests)
-- Enhance: `URLSearchParams` is now accepted as a body
-- Enhance: wrap `response.json()` json parsing error as `FetchError`
-- Fix: fix Request and Response with `null` body
-
-### Headers class
-
-- Major: remove `headers.getAll()`; make `get()` return all headers delimited by commas (per spec)
-- Enhance: make Headers iterable
-- Enhance: make Headers constructor accept an array of tuples
-- Enhance: make sure header names and values are valid in HTTP
-- Fix: coerce Headers prototype function parameters to strings, where applicable
-
-### Documentation
-
-- Enhance: more comprehensive API docs
-- Enhance: add a list of default headers in README
-
-
-# 1.x release
-
-## backport releases (v1.7.0 and beyond)
-
-See [changelog on 1.x branch](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) for details.
-
-## v1.6.3
-
-- Enhance: error handling document to explain `FetchError` design
-- Fix: support `form-data` 2.x releases (requires `form-data` >= 2.1.0)
-
-## v1.6.2
-
-- Enhance: minor document update
-- Fix: response.json() returns empty object on 204 no-content response instead of throwing a syntax error
-
-## v1.6.1
-
-- Fix: if `res.body` is a non-stream non-formdata object, we will call `body.toString` and send it as a string
-- Fix: `counter` value is incorrectly set to `follow` value when wrapping Request instance
-- Fix: documentation update
-
-## v1.6.0
-
-- Enhance: added `res.buffer()` api for convenience, it returns body as a Node.js buffer
-- Enhance: better old server support by handling raw deflate response
-- Enhance: skip encoding detection for non-HTML/XML response
-- Enhance: minor document update
-- Fix: HEAD request doesn't need decompression, as body is empty
-- Fix: `req.body` now accepts a Node.js buffer
-
-## v1.5.3
-
-- Fix: handle 204 and 304 responses when body is empty but content-encoding is gzip/deflate
-- Fix: allow resolving response and cloned response in any order
-- Fix: avoid setting `content-length` when `form-data` body uses streams
-- Fix: send DELETE request with content-length when body is present
-- Fix: allow any url when calling new Request, but still reject non-http(s) url in fetch
-
-## v1.5.2
-
-- Fix: allow node.js core to handle keep-alive connection pool when passing a custom agent
-
-## v1.5.1
-
-- Fix: redirect mode `manual` should work even when there is no redirection or broken redirection
-
-## v1.5.0
-
-- Enhance: rejected promise now uses a custom `Error` (thx to @pekeler)
-- Enhance: `FetchError` contains `err.type` and `err.code`, allows for better error handling (thx to @pekeler)
-- Enhance: basic support for redirect mode `manual` and `error`, allows for location header extraction (thx to @jimmywarting for the initial PR)
-
-## v1.4.1
-
-- Fix: wrapping Request instance with FormData body again should preserve the body as-is
-
-## v1.4.0
-
-- Enhance: Request and Response now have `clone` method (thx to @kirill-konshin for the initial PR)
-- Enhance: Request and Response now have proper string and buffer body support (thx to @kirill-konshin)
-- Enhance: Body constructor has been refactored out (thx to @kirill-konshin)
-- Enhance: Headers now has `forEach` method (thx to @tricoder42)
-- Enhance: back to 100% code coverage
-- Fix: better form-data support (thx to @item4)
-- Fix: better character encoding detection under chunked encoding (thx to @dsuket for the initial PR)
-
-## v1.3.3
-
-- Fix: make sure `Content-Length` header is set when body is string for POST/PUT/PATCH requests
-- Fix: handle body stream error, for cases such as incorrect `Content-Encoding` header
-- Fix: when following certain redirects, use `GET` on subsequent request per Fetch Spec
-- Fix: `Request` and `Response` constructors now parse headers input using `Headers`
-
-## v1.3.2
-
-- Enhance: allow auto detect of form-data input (no `FormData` spec on node.js, this is form-data specific feature)
-
-## v1.3.1
-
-- Enhance: allow custom host header to be set (server-side only feature, as it's a forbidden header on client-side)
-
-## v1.3.0
-
-- Enhance: now `fetch.Request` is exposed as well
-
-## v1.2.1
-
-- Enhance: `Headers` now normalizes `Number` values to `String`, preventing common mistakes
-
-## v1.2.0
-
-- Enhance: now fetch.Headers and fetch.Response are exposed, making testing easier
-
-## v1.1.2
-
-- Fix: `Headers` should only support `String` and `Array` properties, and ignore others
-
-## v1.1.1
-
-- Enhance: now `req.headers` accepts both plain objects and `Headers` instances
-
-## v1.1.0
-
-- Enhance: timeout now also applies to response body (in case of slow response)
-- Fix: timeout is now cleared properly when fetch is done/has failed
-
-## v1.0.6
-
-- Fix: less greedy content-type charset matching
-
-## v1.0.5
-
-- Fix: when `follow = 0`, fetch should not follow redirect
-- Enhance: update tests for better coverage
-- Enhance: code formatting
-- Enhance: clean up doc
-
-## v1.0.4
-
-- Enhance: test iojs support
-- Enhance: timeout attached to socket event only fire once per redirect
-
-## v1.0.3
-
-- Fix: response size limit should reject large chunk
-- Enhance: added character encoding detection for xml, such as rss/atom feed (encoding in DTD)
-
-## v1.0.2
-
-- Fix: added res.ok per spec change
-
-## v1.0.0
-
-- Enhance: better test coverage and doc
-
-
-# 0.x release
-
-## v0.1
-
-- Major: initial public release
diff --git a/scripts/metrics/node_modules/node-fetch/LICENSE.md b/scripts/metrics/node_modules/node-fetch/LICENSE.md
deleted file mode 100644
index 660ffecb58b..00000000000
--- a/scripts/metrics/node_modules/node-fetch/LICENSE.md
+++ /dev/null
@@ -1,22 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2016 David Frank
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-
diff --git a/scripts/metrics/node_modules/node-fetch/README.md b/scripts/metrics/node_modules/node-fetch/README.md
deleted file mode 100644
index 48f4215e4e7..00000000000
--- a/scripts/metrics/node_modules/node-fetch/README.md
+++ /dev/null
@@ -1,538 +0,0 @@
-node-fetch
-==========
-
-[![npm version][npm-image]][npm-url]
-[![build status][travis-image]][travis-url]
-[![coverage status][codecov-image]][codecov-url]
-[![install size][install-size-image]][install-size-url]
-
-A light-weight module that brings `window.fetch` to Node.js
-
-(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567))
-
-
-
-- [Motivation](#motivation)
-- [Features](#features)
-- [Difference from client-side fetch](#difference-from-client-side-fetch)
-- [Installation](#installation)
-- [Loading and configuring the module](#loading-and-configuring-the-module)
-- [Common Usage](#common-usage)
- - [Plain text or HTML](#plain-text-or-html)
- - [JSON](#json)
- - [Simple Post](#simple-post)
- - [Post with JSON](#post-with-json)
- - [Post with form parameters](#post-with-form-parameters)
- - [Handling exceptions](#handling-exceptions)
- - [Handling client and server errors](#handling-client-and-server-errors)
-- [Advanced Usage](#advanced-usage)
- - [Streams](#streams)
- - [Buffer](#buffer)
- - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data)
- - [Post data using a file stream](#post-data-using-a-file-stream)
- - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart)
- - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal)
-- [API](#api)
- - [fetch(url[, options])](#fetchurl-options)
- - [Options](#options)
- - [Class: Request](#class-request)
- - [Class: Response](#class-response)
- - [Class: Headers](#class-headers)
- - [Interface: Body](#interface-body)
- - [Class: FetchError](#class-fetcherror)
-- [License](#license)
-- [Acknowledgement](#acknowledgement)
-
-
-
-## Motivation
-
-Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime.
-
-See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side).
-
-## Features
-
-- Stay consistent with `window.fetch` API.
-- Make conscious trade-offs when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, and document known differences.
-- Use native promise, but allow substituting it with [insert your favorite promise library].
-- Use native Node streams for body, on both request and response.
-- Decode content encoding (gzip/deflate) properly, and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically.
-- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting.
-
-## Difference from client-side fetch
-
-- See [Known Differences](LIMITS.md) for details.
-- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue.
-- Pull requests are welcomed too!
-
-## Installation
-
-Current stable release (`2.x`)
-
-```sh
-$ npm install node-fetch --save
-```
-
-## Loading and configuring the module
-We suggest you load the module via `require`, pending the stabilization of ES modules in Node.js:
-```js
-const fetch = require('node-fetch');
-```
-
-If you are using a Promise library other than native, set it through fetch.Promise:
-```js
-const Bluebird = require('bluebird');
-
-fetch.Promise = Bluebird;
-```
-
-## Common Usage
-
-NOTE: The documentation below is up-to-date with `2.x` releases, [see `1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences.
-
-#### Plain text or HTML
-```js
-fetch('https://github.com/')
- .then(res => res.text())
- .then(body => console.log(body));
-```
-
-#### JSON
-
-```js
-
-fetch('https://api.github.com/users/github')
- .then(res => res.json())
- .then(json => console.log(json));
-```
-
-#### Simple Post
-```js
-fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' })
- .then(res => res.json()) // expecting a json response
- .then(json => console.log(json));
-```
-
-#### Post with JSON
-
-```js
-const body = { a: 1 };
-
-fetch('https://httpbin.org/post', {
- method: 'post',
- body: JSON.stringify(body),
- headers: { 'Content-Type': 'application/json' },
- })
- .then(res => res.json())
- .then(json => console.log(json));
-```
-
-#### Post with form parameters
-`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods.
-
-NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as the body:
-
-```js
-const { URLSearchParams } = require('url');
-
-const params = new URLSearchParams();
-params.append('a', 1);
-
-fetch('https://httpbin.org/post', { method: 'POST', body: params })
- .then(res => res.json())
- .then(json => console.log(json));
-```
-
-#### Handling exceptions
-NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section.
-
-Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from Node.js core libraries (for example, network errors) and operational errors, which are instances of `FetchError`. See the [error handling document](ERROR-HANDLING.md) for more details.
-
-```js
-fetch('https://domain.invalid/')
- .catch(err => console.error(err));
-```
-
-#### Handling client and server errors
-It is common to create a helper function to check that the response does not contain a client (4xx) or server (5xx) error:
-
-```js
-function checkStatus(res) {
- if (res.ok) { // res.status >= 200 && res.status < 300
- return res;
- } else {
- throw MyCustomError(res.statusText);
- }
-}
-
-fetch('https://httpbin.org/status/400')
- .then(checkStatus)
- .then(res => console.log('will not get here...'))
-```
-
-## Advanced Usage
-
-#### Streams
-The "Node.js way" is to use streams when possible:
-
-```js
-fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
- .then(res => {
- const dest = fs.createWriteStream('./octocat.png');
- res.body.pipe(dest);
- });
-```
-
-#### Buffer
-If you prefer to cache binary data in full, use `buffer()`. (NOTE: `buffer()` is a `node-fetch`-only API.)
-
-```js
-const fileType = require('file-type');
-
-fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png')
- .then(res => res.buffer())
- .then(buffer => fileType(buffer))
- .then(type => { /* ... */ });
-```
-
-#### Accessing Headers and other Meta data
-```js
-fetch('https://github.com/')
- .then(res => {
- console.log(res.ok);
- console.log(res.status);
- console.log(res.statusText);
- console.log(res.headers.raw());
- console.log(res.headers.get('content-type'));
- });
-```
-
-#### Post data using a file stream
-
-```js
-const { createReadStream } = require('fs');
-
-const stream = createReadStream('input.txt');
-
-fetch('https://httpbin.org/post', { method: 'POST', body: stream })
- .then(res => res.json())
- .then(json => console.log(json));
-```
-
-#### Post with form-data (detect multipart)
-
-```js
-const FormData = require('form-data');
-
-const form = new FormData();
-form.append('a', 1);
-
-fetch('https://httpbin.org/post', { method: 'POST', body: form })
- .then(res => res.json())
- .then(json => console.log(json));
-
-// OR, using custom headers
-// NOTE: getHeaders() is non-standard API
-
-const form = new FormData();
-form.append('a', 1);
-
-const options = {
- method: 'POST',
- body: form,
- headers: form.getHeaders()
-}
-
-fetch('https://httpbin.org/post', options)
- .then(res => res.json())
- .then(json => console.log(json));
-```
-
-#### Request cancellation with AbortSignal
-
-> NOTE: You may only cancel streamed requests on Node >= v8.0.0
-
-You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller).
-
-An example of timing out a request after 150ms could be achieved as follows:
-
-```js
-import AbortController from 'abort-controller';
-
-const controller = new AbortController();
-const timeout = setTimeout(
- () => { controller.abort(); },
- 150,
-);
-
-fetch(url, { signal: controller.signal })
- .then(res => res.json())
- .then(
- data => {
- useData(data)
- },
- err => {
- if (err.name === 'AbortError') {
- // request was aborted
- }
- },
- )
- .finally(() => {
- clearTimeout(timeout);
- });
-```
-
-See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples.
-
-
-## API
-
-### fetch(url[, options])
-
-- `url` A string representing the URL for fetching
-- `options` [Options](#fetch-options) for the HTTP(S) request
-- Returns: Promise<[Response](#class-response)>
-
-Perform an HTTP(S) fetch.
-
-`url` should be an absolute URL, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected promise.
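-
-For illustration, a minimal sketch of the two cases (the URLs are placeholders):
-
-```js
-// Absolute URL: the promise resolves with a Response
-fetch('https://example.com/')
-  .then(res => console.log(res.status));
-
-// Path-relative URL: the promise rejects
-fetch('/file/under/root')
-  .catch(err => console.error(err.message));
-```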
-
-
-### Options
-
-The default values are shown after each option key.
-
-```js
-{
- // These properties are part of the Fetch Standard
- method: 'GET',
- headers: {}, // request headers. format is identical to that accepted by the Headers constructor (see below)
- body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream
- redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect
- signal: null, // pass an instance of AbortSignal to optionally abort requests
-
- // The following properties are node-fetch extensions
- follow: 20, // maximum redirect count. 0 to not follow redirect
- timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead.
- compress: true, // support gzip/deflate content encoding. false to disable
- size: 0, // maximum response body size in bytes. 0 to disable
- agent: null // http(s).Agent instance, allows custom proxy, certificate, dns lookup etc.
-}
-```
-
-##### Default Headers
-
-If no values are set, the following request headers will be sent automatically:
-
-Header | Value
-------------------- | --------------------------------------------------------
-`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_
-`Accept` | `*/*`
-`Connection` | `close` _(when no `options.agent` is present)_
-`Content-Length` | _(automatically calculated, if possible)_
-`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_
-`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)`
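-
-As a sketch, supplying a header yourself means the corresponding default is not applied (the URL and media type are only examples):
-
-```js
-fetch('https://api.github.com/users/github', {
-  // sent instead of the default `Accept: */*`
-  headers: { 'Accept': 'application/vnd.github.v3+json' }
-})
-  .then(res => res.json())
-  .then(json => console.log(json.name));
-```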
-
-
-### Class: Request
-
-An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface.
-
-Due to the nature of Node.js, the following properties are not implemented at this moment:
-
-- `type`
-- `destination`
-- `referrer`
-- `referrerPolicy`
-- `mode`
-- `credentials`
-- `cache`
-- `integrity`
-- `keepalive`
-
-The following node-fetch extension properties are provided:
-
-- `follow`
-- `compress`
-- `counter`
-- `agent`
-
-See [options](#fetch-options) for exact meaning of these extensions.
-
-#### new Request(input[, options])
-
-*(spec-compliant)*
-
-- `input` A string representing a URL, or another `Request` (which will be cloned)
-- `options` [Options](#fetch-options) for the HTTP(S) request
-
-Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request).
-
-In most cases, calling `fetch(url, options)` directly is simpler than constructing a `Request` object.
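-
-A minimal sketch of constructing a `Request` and handing it to `fetch()` (the URL is only an example):
-
-```js
-const fetch = require('node-fetch');
-const { Request } = fetch;
-
-// Build the request object up front, then pass it to fetch()
-const request = new Request('https://api.github.com/users/github', {
-  method: 'GET',
-  headers: { 'Accept': 'application/json' }
-});
-
-fetch(request)
-  .then(res => res.json())
-  .then(json => console.log(json.login));
-```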
-
-
-### Class: Response
-
-An HTTP(S) response. This class implements the [Body](#iface-body) interface.
-
-The following properties are not implemented in node-fetch at this moment:
-
-- `Response.error()`
-- `Response.redirect()`
-- `type`
-- `trailer`
-
-#### new Response([body[, options]])
-
-*(spec-compliant)*
-
-- `body` A string or [Readable stream][node-readable]
-- `options` A [`ResponseInit`][response-init] options dictionary
-
-Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response).
-
-Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly.
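-
-A short sketch of when constructing one can still be handy, e.g. stubbing a response in a test (names are illustrative):
-
-```js
-const { Response } = require('node-fetch');
-
-// A hand-built JSON response
-const stub = new Response(JSON.stringify({ ok: true }), {
-  status: 200,
-  headers: { 'Content-Type': 'application/json' }
-});
-
-stub.json().then(json => console.log(json.ok)); // true
-```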
-
-#### response.ok
-
-*(spec-compliant)*
-
-Convenience property representing whether the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300.
-
-#### response.redirected
-
-*(spec-compliant)*
-
-Convenience property representing whether the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0.
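-
-A brief sketch of both properties (the URL is only an example):
-
-```js
-fetch('https://github.com')
-  .then(res => {
-    console.log(res.ok);         // true for a 2xx response
-    console.log(res.redirected); // true if at least one redirect was followed
-  });
-```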
-
-
-### Class: Headers
-
-This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented.
-
-#### new Headers([init])
-
-*(spec-compliant)*
-
-- `init` Optional argument to pre-fill the `Headers` object
-
-Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, a key-value map object, or any iterable object.
-
-```js
-// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class
-
-const meta = {
- 'Content-Type': 'text/xml',
- 'Breaking-Bad': '<3'
-};
-const headers = new Headers(meta);
-
-// The above is equivalent to
-const meta = [
- [ 'Content-Type', 'text/xml' ],
- [ 'Breaking-Bad', '<3' ]
-];
-const headers = new Headers(meta);
-
-// You can in fact use any iterable objects, like a Map or even another Headers
-const meta = new Map();
-meta.set('Content-Type', 'text/xml');
-meta.set('Breaking-Bad', '<3');
-const headers = new Headers(meta);
-const copyOfHeaders = new Headers(headers);
-```
-
-
-### Interface: Body
-
-`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes.
-
-The following methods are not yet implemented in node-fetch at this moment:
-
-- `formData()`
-
-#### body.body
-
-*(deviation from spec)*
-
-* Node.js [`Readable` stream][node-readable]
-
-The data encapsulated in the `Body` object. Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable].
-
-#### body.bodyUsed
-
-*(spec-compliant)*
-
-* `Boolean`
-
-A boolean property indicating whether this body has been consumed. Per spec, a consumed body cannot be used again.
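-
-A small sketch of the one-shot behaviour (the URL is only an example):
-
-```js
-fetch('https://api.github.com/users/github')
-  .then(res => {
-    console.log(res.bodyUsed); // false
-    return res.json().then(() => {
-      console.log(res.bodyUsed); // true; consuming the body again would reject
-    });
-  });
-```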
-
-#### body.arrayBuffer()
-#### body.blob()
-#### body.json()
-#### body.text()
-
-*(spec-compliant)*
-
-* Returns: Promise
-
-Consume the body and return a promise that will resolve to one of these formats.
-
-#### body.buffer()
-
-*(node-fetch extension)*
-
-* Returns: Promise<Buffer>
-
-Consume the body and return a promise that will resolve to a Buffer.
-
-#### body.textConverted()
-
-*(node-fetch extension)*
-
-* Returns: Promise<String>
-
-Identical to `body.text()`, except that instead of always interpreting the body as UTF-8, the encoding is sniffed and, where possible, the text is converted to UTF-8.
-
-(This API requires an optional dependency on npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.)
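-
-A small sketch, assuming the optional `encoding` package is installed (the URL is a placeholder for a non-UTF-8 page):
-
-```js
-fetch('https://example.com/legacy-page')
-  .then(res => res.textConverted())
-  .then(text => console.log(text.length));
-```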
-
-
-### Class: FetchError
-
-*(node-fetch extension)*
-
-An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info.
-
-
-### Class: AbortError
-
-*(node-fetch extension)*
-
-An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.md][] for more info.
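-
-A sketch of telling the two apart in a catch handler (assumes the `abort-controller` package, as in the example above):
-
-```js
-const { AbortController } = require('abort-controller');
-
-const controller = new AbortController();
-
-fetch('https://domain.invalid/', { signal: controller.signal })
-  .catch(err => {
-    if (err.name === 'AbortError') {
-      // the request was aborted via controller.abort()
-    } else if (err.name === 'FetchError') {
-      console.log(err.type, err.code); // e.g. 'system', 'ENOTFOUND'
-    }
-  });
-```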
-
-## Acknowledgement
-
-Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference.
-
-`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr).
-
-## License
-
-MIT
-
-[npm-image]: https://flat.badgen.net/npm/v/node-fetch
-[npm-url]: https://www.npmjs.com/package/node-fetch
-[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch
-[travis-url]: https://travis-ci.org/bitinn/node-fetch
-[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master
-[codecov-url]: https://codecov.io/gh/bitinn/node-fetch
-[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch
-[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch
-[whatwg-fetch]: https://fetch.spec.whatwg.org/
-[response-init]: https://fetch.spec.whatwg.org/#responseinit
-[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams
-[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers
-[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md
-[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md
-[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md
diff --git a/scripts/metrics/node_modules/node-fetch/browser.js b/scripts/metrics/node_modules/node-fetch/browser.js
deleted file mode 100644
index 0ad5de004c4..00000000000
--- a/scripts/metrics/node_modules/node-fetch/browser.js
+++ /dev/null
@@ -1,23 +0,0 @@
-"use strict";
-
-// ref: https://github.com/tc39/proposal-global
-var getGlobal = function () {
- // the only reliable means to get the global object is
- // `Function('return this')()`
- // However, this causes CSP violations in Chrome apps.
- if (typeof self !== 'undefined') { return self; }
- if (typeof window !== 'undefined') { return window; }
- if (typeof global !== 'undefined') { return global; }
- throw new Error('unable to locate global object');
-}
-
-var global = getGlobal();
-
-module.exports = exports = global.fetch;
-
-// Needed for TypeScript and Webpack.
-exports.default = global.fetch.bind(global);
-
-exports.Headers = global.Headers;
-exports.Request = global.Request;
-exports.Response = global.Response;
\ No newline at end of file
diff --git a/scripts/metrics/node_modules/node-fetch/lib/index.es.js b/scripts/metrics/node_modules/node-fetch/lib/index.es.js
deleted file mode 100644
index 20ab807872f..00000000000
--- a/scripts/metrics/node_modules/node-fetch/lib/index.es.js
+++ /dev/null
@@ -1,1631 +0,0 @@
-process.emitWarning("The .es.js file is deprecated. Use .mjs instead.");
-
-import Stream from 'stream';
-import http from 'http';
-import Url from 'url';
-import https from 'https';
-import zlib from 'zlib';
-
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
-
-// fix for "Readable" isn't a named export issue
-const Readable = Stream.Readable;
-
-const BUFFER = Symbol('buffer');
-const TYPE = Symbol('type');
-
-class Blob {
- constructor() {
- this[TYPE] = '';
-
- const blobParts = arguments[0];
- const options = arguments[1];
-
- const buffers = [];
- let size = 0;
-
- if (blobParts) {
- const a = blobParts;
- const length = Number(a.length);
- for (let i = 0; i < length; i++) {
- const element = a[i];
- let buffer;
- if (element instanceof Buffer) {
- buffer = element;
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(element);
- } else if (element instanceof Blob) {
- buffer = element[BUFFER];
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element));
- }
- size += buffer.length;
- buffers.push(buffer);
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers);
-
- let type = options && options.type !== undefined && String(options.type).toLowerCase();
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type;
- }
- }
- get size() {
- return this[BUFFER].length;
- }
- get type() {
- return this[TYPE];
- }
- text() {
- return Promise.resolve(this[BUFFER].toString());
- }
- arrayBuffer() {
- const buf = this[BUFFER];
- const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- return Promise.resolve(ab);
- }
- stream() {
- const readable = new Readable();
- readable._read = function () {};
- readable.push(this[BUFFER]);
- readable.push(null);
- return readable;
- }
- toString() {
- return '[object Blob]';
- }
- slice() {
- const size = this.size;
-
- const start = arguments[0];
- const end = arguments[1];
- let relativeStart, relativeEnd;
- if (start === undefined) {
- relativeStart = 0;
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0);
- } else {
- relativeStart = Math.min(start, size);
- }
- if (end === undefined) {
- relativeEnd = size;
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0);
- } else {
- relativeEnd = Math.min(end, size);
- }
- const span = Math.max(relativeEnd - relativeStart, 0);
-
- const buffer = this[BUFFER];
- const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
- const blob = new Blob([], { type: arguments[2] });
- blob[BUFFER] = slicedBuffer;
- return blob;
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-});
-
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * fetch-error.js
- *
- * FetchError interface for operational errors
- */
-
-/**
- * Create FetchError instance
- *
- * @param String message Error message for human
- * @param String type Error type for machine
- * @param String systemError For Node.js system error
- * @return FetchError
- */
-function FetchError(message, type, systemError) {
- Error.call(this, message);
-
- this.message = message;
- this.type = type;
-
- // when err.type is `system`, err.code contains system error code
- if (systemError) {
- this.code = this.errno = systemError.code;
- }
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-FetchError.prototype = Object.create(Error.prototype);
-FetchError.prototype.constructor = FetchError;
-FetchError.prototype.name = 'FetchError';
-
-let convert;
-try {
- convert = require('encoding').convert;
-} catch (e) {}
-
-const INTERNALS = Symbol('Body internals');
-
-// fix an issue where "PassThrough" isn't a named export for node <10
-const PassThrough = Stream.PassThrough;
-
-/**
- * Body mixin
- *
- * Ref: https://fetch.spec.whatwg.org/#body
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-function Body(body) {
- var _this = this;
-
- var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
- _ref$size = _ref.size;
-
- let size = _ref$size === undefined ? 0 : _ref$size;
- var _ref$timeout = _ref.timeout;
- let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
-
- if (body == null) {
- // body is undefined or null
- body = null;
- } else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
- body = Buffer.from(body.toString());
- } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
- body = Buffer.from(body);
- } else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
- body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
- } else if (body instanceof Stream) ; else {
- // none of the above
- // coerce to string then buffer
- body = Buffer.from(String(body));
- }
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null
- };
- this.size = size;
- this.timeout = timeout;
-
- if (body instanceof Stream) {
- body.on('error', function (err) {
- const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
- _this[INTERNALS].error = error;
- });
- }
-}
-
-Body.prototype = {
- get body() {
- return this[INTERNALS].body;
- },
-
- get bodyUsed() {
- return this[INTERNALS].disturbed;
- },
-
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- arrayBuffer() {
- return consumeBody.call(this).then(function (buf) {
- return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- });
- },
-
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- blob() {
- let ct = this.headers && this.headers.get('content-type') || '';
- return consumeBody.call(this).then(function (buf) {
- return Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }), {
- [BUFFER]: buf
- });
- });
- },
-
- /**
- * Decode response as json
- *
- * @return Promise
- */
- json() {
- var _this2 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- try {
- return JSON.parse(buffer.toString());
- } catch (err) {
- return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
- }
- });
- },
-
- /**
- * Decode response as text
- *
- * @return Promise
- */
- text() {
- return consumeBody.call(this).then(function (buffer) {
- return buffer.toString();
- });
- },
-
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer() {
- return consumeBody.call(this);
- },
-
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted() {
- var _this3 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- return convertBody(buffer, _this3.headers);
- });
- }
-};
-
-// In browsers, all properties are enumerable.
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true }
-});
-
-Body.mixIn = function (proto) {
- for (const name of Object.getOwnPropertyNames(Body.prototype)) {
- // istanbul ignore else: future proof
- if (!(name in proto)) {
- const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
- Object.defineProperty(proto, name, desc);
- }
- }
-};
-
-/**
- * Consume and convert an entire Body to a Buffer.
- *
- * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
- *
- * @return Promise
- */
-function consumeBody() {
- var _this4 = this;
-
- if (this[INTERNALS].disturbed) {
- return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
- }
-
- this[INTERNALS].disturbed = true;
-
- if (this[INTERNALS].error) {
- return Body.Promise.reject(this[INTERNALS].error);
- }
-
- let body = this.body;
-
- // body is null
- if (body === null) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is blob
- if (isBlob(body)) {
- body = body.stream();
- }
-
- // body is buffer
- if (Buffer.isBuffer(body)) {
- return Body.Promise.resolve(body);
- }
-
- // istanbul ignore if: should never happen
- if (!(body instanceof Stream)) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is stream
- // get ready to actually consume the body
- let accum = [];
- let accumBytes = 0;
- let abort = false;
-
- return new Body.Promise(function (resolve, reject) {
- let resTimeout;
-
- // allow timeout on slow response body
- if (_this4.timeout) {
- resTimeout = setTimeout(function () {
- abort = true;
- reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
- }, _this4.timeout);
- }
-
- // handle stream errors
- body.on('error', function (err) {
- if (err.name === 'AbortError') {
- // if the request was aborted, reject with this Error
- abort = true;
- reject(err);
- } else {
- // other errors, such as incorrect content-encoding
- reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
-
- body.on('data', function (chunk) {
- if (abort || chunk === null) {
- return;
- }
-
- if (_this4.size && accumBytes + chunk.length > _this4.size) {
- abort = true;
- reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
- return;
- }
-
- accumBytes += chunk.length;
- accum.push(chunk);
- });
-
- body.on('end', function () {
- if (abort) {
- return;
- }
-
- clearTimeout(resTimeout);
-
- try {
- resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
- });
-}
-
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
-
- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
-
- // html5
- if (!res && str) {
- res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;
-
- this[MAP] = Object.create(null);
-
- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);
-
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }
-
- return;
- }
-
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
-
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }
-
- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }
-
- return this[MAP][key].join(', ');
- }
-
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
-
- let pairs = getHeaders(this);
- let i = 0;
- while (i < pairs.length) {
- var _pairs$i = pairs[i];
- const name = _pairs$i[0],
- value = _pairs$i[1];
-
- callback.call(thisArg, value, name, this);
- pairs = getHeaders(this);
- i++;
- }
- }
-
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- this[MAP][key !== undefined ? key : name] = [value];
- }
-
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- this[MAP][key].push(value);
- } else {
- this[MAP][name] = [value];
- }
- }
-
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has(name) {
- name = `${name}`;
- validateName(name);
- return find(this[MAP], name) !== undefined;
- }
-
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- delete this[MAP][key];
- }
- }
-
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw() {
- return this[MAP];
- }
-
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys() {
- return createHeadersIterator(this, 'key');
- }
-
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values() {
- return createHeadersIterator(this, 'value');
- }
-
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator]() {
- return createHeadersIterator(this, 'key+value');
- }
-}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator];
-
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true }
-});
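For reference, a minimal sketch of how the Headers class deleted above is typically used (a sketch only; it assumes the package resolves as `node-fetch`, and the header names and values are illustrative):

    const { Headers } = require('node-fetch');

    const headers = new Headers({ 'Content-Type': 'text/plain' });
    headers.append('X-Trace', 'abc');
    headers.append('X-Trace', 'def');

    // get() joins multiple values for one name with ', ', and lookup is case-insensitive
    console.log(headers.get('x-trace'));      // 'abc, def'
    console.log(headers.has('content-type')); // true

    // iteration yields [name, value] pairs with lowercased names in sorted order
    for (const [name, value] of headers) {
      console.log(name, value);
    }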
-
-function getHeaders(headers) {
- let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
-
- const keys = Object.keys(headers[MAP]).sort();
- return keys.map(kind === 'key' ? function (k) {
- return k.toLowerCase();
- } : kind === 'value' ? function (k) {
- return headers[MAP][k].join(', ');
- } : function (k) {
- return [k.toLowerCase(), headers[MAP][k].join(', ')];
- });
-}
-
-const INTERNAL = Symbol('internal');
-
-function createHeadersIterator(target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype);
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
- };
- return iterator;
-}
-
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next() {
- // istanbul ignore if
- if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator');
- }
-
- var _INTERNAL = this[INTERNAL];
- const target = _INTERNAL.target,
- kind = _INTERNAL.kind,
- index = _INTERNAL.index;
-
- const values = getHeaders(target, kind);
- const len = values.length;
- if (index >= len) {
- return {
- value: undefined,
- done: true
- };
- }
-
- this[INTERNAL].index = index + 1;
-
- return {
- value: values[index],
- done: false
- };
- }
-}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
-
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * Export the Headers object in a form that Node.js can consume.
- *
- * @param Headers headers
- * @return Object
- */
-function exportNodeCompatibleHeaders(headers) {
- const obj = Object.assign({ __proto__: null }, headers[MAP]);
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host');
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0];
- }
-
- return obj;
-}
-
-/**
- * Create a Headers object from an object of headers, ignoring those that do
- * not conform to HTTP grammar productions.
- *
- * @param Object obj Object of headers
- * @return Headers
- */
-function createHeadersLenient(obj) {
- const headers = new Headers();
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue;
- }
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue;
- }
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val];
- } else {
- headers[MAP][name].push(val);
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]];
- }
- }
- return headers;
-}
-
-const INTERNALS$1 = Symbol('Response internals');
-
-// fix an issue where "STATUS_CODES" aren't a named export for node <10
-const STATUS_CODES = http.STATUS_CODES;
-
-/**
- * Response class
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Response {
- constructor() {
- let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
- let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- Body.call(this, body, opts);
-
- const status = opts.status || 200;
- const headers = new Headers(opts.headers);
-
- if (body != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(body);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- this[INTERNALS$1] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter
- };
- }
-
- get url() {
- return this[INTERNALS$1].url;
- }
-
- get status() {
- return this[INTERNALS$1].status;
- }
-
- /**
- * Convenience property representing if the request ended normally
- */
- get ok() {
- return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
- }
-
- get redirected() {
- return this[INTERNALS$1].counter > 0;
- }
-
- get statusText() {
- return this[INTERNALS$1].statusText;
- }
-
- get headers() {
- return this[INTERNALS$1].headers;
- }
-
- /**
- * Clone this response
- *
- * @return Response
- */
- clone() {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected
- });
- }
-}
-
-Body.mixIn(Response.prototype);
-
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true }
-});
-
-Object.defineProperty(Response.prototype, Symbol.toStringTag, {
- value: 'Response',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-const INTERNALS$2 = Symbol('Request internals');
-
-// fix an issue where "format", "parse" aren't a named export for node <10
-const parse_url = Url.parse;
-const format_url = Url.format;
-
-const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
-
-/**
- * Check if a value is an instance of Request.
- *
- * @param Mixed input
- * @return Boolean
- */
-function isRequest(input) {
- return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
-}
-
-function isAbortSignal(signal) {
- const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
- return !!(proto && proto.constructor.name === 'AbortSignal');
-}
-
-/**
- * Request class
- *
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
- */
-class Request {
- constructor(input) {
- let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- let parsedURL;
-
- // normalize input
- if (!isRequest(input)) {
- if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
- // will fall into this branch also (since their `toString()` will return
- // `href` property anyway)
- parsedURL = parse_url(input.href);
- } else {
- // coerce input to a string before attempting to parse
- parsedURL = parse_url(`${input}`);
- }
- input = {};
- } else {
- parsedURL = parse_url(input.url);
- }
-
- let method = init.method || input.method || 'GET';
- method = method.toUpperCase();
-
- if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body');
- }
-
- let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
-
- Body.call(this, inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0
- });
-
- const headers = new Headers(init.headers || input.headers || {});
-
- if (inputBody != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- let signal = isRequest(input) ? input.signal : null;
- if ('signal' in init) signal = init.signal;
-
- if (signal != null && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal to be an instanceof AbortSignal');
- }
-
- this[INTERNALS$2] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal
- };
-
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
- this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
- this.counter = init.counter || input.counter || 0;
- this.agent = init.agent || input.agent;
- }
-
- get method() {
- return this[INTERNALS$2].method;
- }
-
- get url() {
- return format_url(this[INTERNALS$2].parsedURL);
- }
-
- get headers() {
- return this[INTERNALS$2].headers;
- }
-
- get redirect() {
- return this[INTERNALS$2].redirect;
- }
-
- get signal() {
- return this[INTERNALS$2].signal;
- }
-
- /**
- * Clone this request
- *
- * @return Request
- */
- clone() {
- return new Request(this);
- }
-}
-
-Body.mixIn(Request.prototype);
-
-Object.defineProperty(Request.prototype, Symbol.toStringTag, {
- value: 'Request',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true }
-});
-
-/**
- * Convert a Request to Node.js http request options.
- *
- * @param Request A Request instance
- * @return Object The options object to be passed to http.request
- */
-function getNodeRequestOptions(request) {
- const parsedURL = request[INTERNALS$2].parsedURL;
- const headers = new Headers(request[INTERNALS$2].headers);
-
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*');
- }
-
- // Basic fetch
- if (!parsedURL.protocol || !parsedURL.hostname) {
- throw new TypeError('Only absolute URLs are supported');
- }
-
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported');
- }
-
- if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
- throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
- }
-
- // HTTP-network-or-cache fetch steps 2.4-2.7
- let contentLengthValue = null;
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
- contentLengthValue = '0';
- }
- if (request.body != null) {
- const totalBytes = getTotalBytes(request);
- if (typeof totalBytes === 'number') {
- contentLengthValue = String(totalBytes);
- }
- }
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue);
- }
-
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
- }
-
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate');
- }
-
- if (!headers.has('Connection') && !request.agent) {
- headers.set('Connection', 'close');
- }
-
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
-
- return Object.assign({}, parsedURL, {
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent: request.agent
- });
-}
-
-/**
- * abort-error.js
- *
- * AbortError interface for cancelled requests
- */
-
-/**
- * Create AbortError instance
- *
- * @param String message Error message for human
- * @return AbortError
- */
-function AbortError(message) {
- Error.call(this, message);
-
- this.type = 'aborted';
- this.message = message;
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-AbortError.prototype = Object.create(Error.prototype);
-AbortError.prototype.constructor = AbortError;
-AbortError.prototype.name = 'AbortError';
-
-// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
-const PassThrough$1 = Stream.PassThrough;
-const resolve_url = Url.resolve;
-
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-function fetch(url, opts) {
-
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
- }
-
- Body.Promise = fetch.Promise;
-
- // wrap http.request into fetch
- return new fetch.Promise(function (resolve, reject) {
- // build request object
- const request = new Request(url, opts);
- const options = getNodeRequestOptions(request);
-
- const send = (options.protocol === 'https:' ? https : http).request;
- const signal = request.signal;
-
- let response = null;
-
- const abort = function abort() {
- let error = new AbortError('The user aborted a request.');
- reject(error);
- if (request.body && request.body instanceof Stream.Readable) {
- request.body.destroy(error);
- }
- if (!response || !response.body) return;
- response.body.emit('error', error);
- };
-
- if (signal && signal.aborted) {
- abort();
- return;
- }
-
- const abortAndFinalize = function abortAndFinalize() {
- abort();
- finalize();
- };
-
- // send request
- const req = send(options);
- let reqTimeout;
-
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize);
- }
-
- function finalize() {
- req.abort();
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- clearTimeout(reqTimeout);
- }
-
- if (request.timeout) {
- req.once('socket', function (socket) {
- reqTimeout = setTimeout(function () {
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
- finalize();
- }, request.timeout);
- });
- }
-
- req.on('error', function (err) {
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
- finalize();
- });
-
- req.on('response', function (res) {
- clearTimeout(reqTimeout);
-
- const headers = createHeadersLenient(res.headers);
-
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location');
-
- // HTTP fetch step 5.3
- const locationURL = location === null ? null : resolve_url(request.url, location);
-
- // HTTP fetch step 5.5
- switch (request.redirect) {
- case 'error':
- reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
- finalize();
- return;
- case 'manual':
- // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL);
- } catch (err) {
- // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
- reject(err);
- }
- }
- break;
- case 'follow':
- // HTTP-redirect fetch step 2
- if (locationURL === null) {
- break;
- }
-
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout
- };
-
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
- reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
- requestOpts.method = 'GET';
- requestOpts.body = undefined;
- requestOpts.headers.delete('content-length');
- }
-
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)));
- finalize();
- return;
- }
- }
-
- // prepare response
- res.once('end', function () {
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- });
- let body = res.pipe(new PassThrough$1());
-
- const response_options = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter
- };
-
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding');
-
- // HTTP-network fetch step 12.1.1.4: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // For Node v6+
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- };
-
- // for gzip
- if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // for deflate
- if (codings == 'deflate' || codings == 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough$1());
- raw.once('data', function (chunk) {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
- } else {
- body = body.pipe(zlib.createInflateRaw());
- }
- response = new Response(body, response_options);
- resolve(response);
- });
- return;
- }
-
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // otherwise, use response as-is
- response = new Response(body, response_options);
- resolve(response);
- });
-
- writeToStream(req, request);
- });
-}
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = function (code) {
- return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
-};
-
-// expose Promise
-fetch.Promise = global.Promise;
-
-export default fetch;
-export { Headers, Request, Response, FetchError };
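For context, a minimal sketch of how the fetch entry point removed above is typically consumed (a sketch only, assuming the CommonJS build is resolvable as `node-fetch`; the URL is a placeholder):

    const fetch = require('node-fetch');

    async function getJson(url) {
      const res = await fetch(url, {
        timeout: 5000,      // rejects with a 'request-timeout' FetchError when exceeded
        redirect: 'follow', // the default; follows up to `follow` (20) redirects
        compress: true      // sends Accept-Encoding: gzip,deflate
      });
      if (!res.ok) {
        throw new Error(`HTTP ${res.status} ${res.statusText}`);
      }
      return res.json();    // rejects with an 'invalid-json' FetchError on malformed JSON
    }

    getJson('https://example.com/data.json').then(console.log, console.error);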
diff --git a/scripts/metrics/node_modules/node-fetch/lib/index.js b/scripts/metrics/node_modules/node-fetch/lib/index.js
deleted file mode 100644
index 86c7c031229..00000000000
--- a/scripts/metrics/node_modules/node-fetch/lib/index.js
+++ /dev/null
@@ -1,1640 +0,0 @@
-'use strict';
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
-
-var Stream = _interopDefault(require('stream'));
-var http = _interopDefault(require('http'));
-var Url = _interopDefault(require('url'));
-var https = _interopDefault(require('https'));
-var zlib = _interopDefault(require('zlib'));
-
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
-
-// fix for "Readable" isn't a named export issue
-const Readable = Stream.Readable;
-
-const BUFFER = Symbol('buffer');
-const TYPE = Symbol('type');
-
-class Blob {
- constructor() {
- this[TYPE] = '';
-
- const blobParts = arguments[0];
- const options = arguments[1];
-
- const buffers = [];
- let size = 0;
-
- if (blobParts) {
- const a = blobParts;
- const length = Number(a.length);
- for (let i = 0; i < length; i++) {
- const element = a[i];
- let buffer;
- if (element instanceof Buffer) {
- buffer = element;
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(element);
- } else if (element instanceof Blob) {
- buffer = element[BUFFER];
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element));
- }
- size += buffer.length;
- buffers.push(buffer);
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers);
-
- let type = options && options.type !== undefined && String(options.type).toLowerCase();
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type;
- }
- }
- get size() {
- return this[BUFFER].length;
- }
- get type() {
- return this[TYPE];
- }
- text() {
- return Promise.resolve(this[BUFFER].toString());
- }
- arrayBuffer() {
- const buf = this[BUFFER];
- const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- return Promise.resolve(ab);
- }
- stream() {
- const readable = new Readable();
- readable._read = function () {};
- readable.push(this[BUFFER]);
- readable.push(null);
- return readable;
- }
- toString() {
- return '[object Blob]';
- }
- slice() {
- const size = this.size;
-
- const start = arguments[0];
- const end = arguments[1];
- let relativeStart, relativeEnd;
- if (start === undefined) {
- relativeStart = 0;
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0);
- } else {
- relativeStart = Math.min(start, size);
- }
- if (end === undefined) {
- relativeEnd = size;
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0);
- } else {
- relativeEnd = Math.min(end, size);
- }
- const span = Math.max(relativeEnd - relativeStart, 0);
-
- const buffer = this[BUFFER];
- const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
- const blob = new Blob([], { type: arguments[2] });
- blob[BUFFER] = slicedBuffer;
- return blob;
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-});
-
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * fetch-error.js
- *
- * FetchError interface for operational errors
- */
-
-/**
- * Create FetchError instance
- *
- * @param String message Error message for human
- * @param String type Error type for machine
- * @param String systemError For Node.js system error
- * @return FetchError
- */
-function FetchError(message, type, systemError) {
- Error.call(this, message);
-
- this.message = message;
- this.type = type;
-
- // when err.type is `system`, err.code contains system error code
- if (systemError) {
- this.code = this.errno = systemError.code;
- }
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-FetchError.prototype = Object.create(Error.prototype);
-FetchError.prototype.constructor = FetchError;
-FetchError.prototype.name = 'FetchError';
-
-let convert;
-try {
- convert = require('encoding').convert;
-} catch (e) {}
-
-const INTERNALS = Symbol('Body internals');
-
-// fix an issue where "PassThrough" isn't a named export for node <10
-const PassThrough = Stream.PassThrough;
-
-/**
- * Body mixin
- *
- * Ref: https://fetch.spec.whatwg.org/#body
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-function Body(body) {
- var _this = this;
-
- var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
- _ref$size = _ref.size;
-
- let size = _ref$size === undefined ? 0 : _ref$size;
- var _ref$timeout = _ref.timeout;
- let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
-
- if (body == null) {
- // body is undefined or null
- body = null;
- } else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
- body = Buffer.from(body.toString());
- } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
- body = Buffer.from(body);
- } else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
- body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
- } else if (body instanceof Stream) ; else {
- // none of the above
- // coerce to string then buffer
- body = Buffer.from(String(body));
- }
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null
- };
- this.size = size;
- this.timeout = timeout;
-
- if (body instanceof Stream) {
- body.on('error', function (err) {
- const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
- _this[INTERNALS].error = error;
- });
- }
-}
-
-Body.prototype = {
- get body() {
- return this[INTERNALS].body;
- },
-
- get bodyUsed() {
- return this[INTERNALS].disturbed;
- },
-
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- arrayBuffer() {
- return consumeBody.call(this).then(function (buf) {
- return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- });
- },
-
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- blob() {
- let ct = this.headers && this.headers.get('content-type') || '';
- return consumeBody.call(this).then(function (buf) {
- return Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }), {
- [BUFFER]: buf
- });
- });
- },
-
- /**
- * Decode response as json
- *
- * @return Promise
- */
- json() {
- var _this2 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- try {
- return JSON.parse(buffer.toString());
- } catch (err) {
- return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
- }
- });
- },
-
- /**
- * Decode response as text
- *
- * @return Promise
- */
- text() {
- return consumeBody.call(this).then(function (buffer) {
- return buffer.toString();
- });
- },
-
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer() {
- return consumeBody.call(this);
- },
-
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted() {
- var _this3 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- return convertBody(buffer, _this3.headers);
- });
- }
-};
-
-// In browsers, all properties are enumerable.
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true }
-});
-
-Body.mixIn = function (proto) {
- for (const name of Object.getOwnPropertyNames(Body.prototype)) {
- // istanbul ignore else: future proof
- if (!(name in proto)) {
- const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
- Object.defineProperty(proto, name, desc);
- }
- }
-};
-
-/**
- * Consume and convert an entire Body to a Buffer.
- *
- * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
- *
- * @return Promise
- */
-function consumeBody() {
- var _this4 = this;
-
- if (this[INTERNALS].disturbed) {
- return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
- }
-
- this[INTERNALS].disturbed = true;
-
- if (this[INTERNALS].error) {
- return Body.Promise.reject(this[INTERNALS].error);
- }
-
- let body = this.body;
-
- // body is null
- if (body === null) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is blob
- if (isBlob(body)) {
- body = body.stream();
- }
-
- // body is buffer
- if (Buffer.isBuffer(body)) {
- return Body.Promise.resolve(body);
- }
-
- // istanbul ignore if: should never happen
- if (!(body instanceof Stream)) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is stream
- // get ready to actually consume the body
- let accum = [];
- let accumBytes = 0;
- let abort = false;
-
- return new Body.Promise(function (resolve, reject) {
- let resTimeout;
-
- // allow timeout on slow response body
- if (_this4.timeout) {
- resTimeout = setTimeout(function () {
- abort = true;
- reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
- }, _this4.timeout);
- }
-
- // handle stream errors
- body.on('error', function (err) {
- if (err.name === 'AbortError') {
- // if the request was aborted, reject with this Error
- abort = true;
- reject(err);
- } else {
- // other errors, such as incorrect content-encoding
- reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
-
- body.on('data', function (chunk) {
- if (abort || chunk === null) {
- return;
- }
-
- if (_this4.size && accumBytes + chunk.length > _this4.size) {
- abort = true;
- reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
- return;
- }
-
- accumBytes += chunk.length;
- accum.push(chunk);
- });
-
- body.on('end', function () {
- if (abort) {
- return;
- }
-
- clearTimeout(resTimeout);
-
- try {
- resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
- });
-}
-
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
-
- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
-
- // html5
- if (!res && str) {
- res = /&lt;meta.+?charset=(['"])(.+?)\1/i.exec(str);
- }
-
-const MAP = Symbol('map');
-
-class Headers {
- constructor() {
- let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined;
-
- this[MAP] = Object.create(null);
-
- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);
-
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }
-
- return;
- }
-
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
-
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record<ByteString, ByteString>
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }
-
- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }
-
- return this[MAP][key].join(', ');
- }
-
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
-
- let pairs = getHeaders(this);
- let i = 0;
- while (i < pairs.length) {
- var _pairs$i = pairs[i];
- const name = _pairs$i[0],
- value = _pairs$i[1];
-
- callback.call(thisArg, value, name, this);
- pairs = getHeaders(this);
- i++;
- }
- }
-
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- this[MAP][key !== undefined ? key : name] = [value];
- }
-
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- this[MAP][key].push(value);
- } else {
- this[MAP][name] = [value];
- }
- }
-
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has(name) {
- name = `${name}`;
- validateName(name);
- return find(this[MAP], name) !== undefined;
- }
-
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- delete this[MAP][key];
- }
- }
-
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw() {
- return this[MAP];
- }
-
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys() {
- return createHeadersIterator(this, 'key');
- }
-
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values() {
- return createHeadersIterator(this, 'value');
- }
-
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator]() {
- return createHeadersIterator(this, 'key+value');
- }
-}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator];
-
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true }
-});
-
-function getHeaders(headers) {
- let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
-
- const keys = Object.keys(headers[MAP]).sort();
- return keys.map(kind === 'key' ? function (k) {
- return k.toLowerCase();
- } : kind === 'value' ? function (k) {
- return headers[MAP][k].join(', ');
- } : function (k) {
- return [k.toLowerCase(), headers[MAP][k].join(', ')];
- });
-}
-
-const INTERNAL = Symbol('internal');
-
-function createHeadersIterator(target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype);
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
- };
- return iterator;
-}
-
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next() {
- // istanbul ignore if
- if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator');
- }
-
- var _INTERNAL = this[INTERNAL];
- const target = _INTERNAL.target,
- kind = _INTERNAL.kind,
- index = _INTERNAL.index;
-
- const values = getHeaders(target, kind);
- const len = values.length;
- if (index >= len) {
- return {
- value: undefined,
- done: true
- };
- }
-
- this[INTERNAL].index = index + 1;
-
- return {
- value: values[index],
- done: false
- };
- }
-}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
-
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * Export the Headers object in a form that Node.js can consume.
- *
- * @param Headers headers
- * @return Object
- */
-function exportNodeCompatibleHeaders(headers) {
- const obj = Object.assign({ __proto__: null }, headers[MAP]);
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host');
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0];
- }
-
- return obj;
-}
-
-/**
- * Create a Headers object from an object of headers, ignoring those that do
- * not conform to HTTP grammar productions.
- *
- * @param Object obj Object of headers
- * @return Headers
- */
-function createHeadersLenient(obj) {
- const headers = new Headers();
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue;
- }
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue;
- }
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val];
- } else {
- headers[MAP][name].push(val);
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]];
- }
- }
- return headers;
-}
-
-const INTERNALS$1 = Symbol('Response internals');
-
-// fix an issue where "STATUS_CODES" aren't a named export for node <10
-const STATUS_CODES = http.STATUS_CODES;
-
-/**
- * Response class
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Response {
- constructor() {
- let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
- let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- Body.call(this, body, opts);
-
- const status = opts.status || 200;
- const headers = new Headers(opts.headers);
-
- if (body != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(body);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- this[INTERNALS$1] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter
- };
- }
-
- get url() {
- return this[INTERNALS$1].url;
- }
-
- get status() {
- return this[INTERNALS$1].status;
- }
-
- /**
- * Convenience property representing if the request ended normally
- */
- get ok() {
- return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
- }
-
- get redirected() {
- return this[INTERNALS$1].counter > 0;
- }
-
- get statusText() {
- return this[INTERNALS$1].statusText;
- }
-
- get headers() {
- return this[INTERNALS$1].headers;
- }
-
- /**
- * Clone this response
- *
- * @return Response
- */
- clone() {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected
- });
- }
-}
-
-Body.mixIn(Response.prototype);
-
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true }
-});
-
-Object.defineProperty(Response.prototype, Symbol.toStringTag, {
- value: 'Response',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-const INTERNALS$2 = Symbol('Request internals');
-
-// fix an issue where "format", "parse" aren't a named export for node <10
-const parse_url = Url.parse;
-const format_url = Url.format;
-
-const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
-
-/**
- * Check if a value is an instance of Request.
- *
- * @param Mixed input
- * @return Boolean
- */
-function isRequest(input) {
- return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
-}
-
-function isAbortSignal(signal) {
- const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
- return !!(proto && proto.constructor.name === 'AbortSignal');
-}
-
-/**
- * Request class
- *
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
- */
-class Request {
- constructor(input) {
- let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- let parsedURL;
-
- // normalize input
- if (!isRequest(input)) {
- if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
- // will fall into this branch also (since their `toString()` will return
- // `href` property anyway)
- parsedURL = parse_url(input.href);
- } else {
- // coerce input to a string before attempting to parse
- parsedURL = parse_url(`${input}`);
- }
- input = {};
- } else {
- parsedURL = parse_url(input.url);
- }
-
- let method = init.method || input.method || 'GET';
- method = method.toUpperCase();
-
- if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body');
- }
-
- let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
-
- Body.call(this, inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0
- });
-
- const headers = new Headers(init.headers || input.headers || {});
-
- if (inputBody != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- let signal = isRequest(input) ? input.signal : null;
- if ('signal' in init) signal = init.signal;
-
- if (signal != null && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal to be an instanceof AbortSignal');
- }
-
- this[INTERNALS$2] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal
- };
-
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
- this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
- this.counter = init.counter || input.counter || 0;
- this.agent = init.agent || input.agent;
- }
-
- get method() {
- return this[INTERNALS$2].method;
- }
-
- get url() {
- return format_url(this[INTERNALS$2].parsedURL);
- }
-
- get headers() {
- return this[INTERNALS$2].headers;
- }
-
- get redirect() {
- return this[INTERNALS$2].redirect;
- }
-
- get signal() {
- return this[INTERNALS$2].signal;
- }
-
- /**
- * Clone this request
- *
- * @return Request
- */
- clone() {
- return new Request(this);
- }
-}
-
-Body.mixIn(Request.prototype);
-
-Object.defineProperty(Request.prototype, Symbol.toStringTag, {
- value: 'Request',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true }
-});
-
-/**
- * Convert a Request to Node.js http request options.
- *
- * @param Request A Request instance
- * @return Object The options object to be passed to http.request
- */
-function getNodeRequestOptions(request) {
- const parsedURL = request[INTERNALS$2].parsedURL;
- const headers = new Headers(request[INTERNALS$2].headers);
-
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*');
- }
-
- // Basic fetch
- if (!parsedURL.protocol || !parsedURL.hostname) {
- throw new TypeError('Only absolute URLs are supported');
- }
-
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported');
- }
-
- if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
- throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
- }
-
- // HTTP-network-or-cache fetch steps 2.4-2.7
- let contentLengthValue = null;
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
- contentLengthValue = '0';
- }
- if (request.body != null) {
- const totalBytes = getTotalBytes(request);
- if (typeof totalBytes === 'number') {
- contentLengthValue = String(totalBytes);
- }
- }
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue);
- }
-
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
- }
-
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate');
- }
-
- if (!headers.has('Connection') && !request.agent) {
- headers.set('Connection', 'close');
- }
-
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
-
- return Object.assign({}, parsedURL, {
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent: request.agent
- });
-}
-
-/**
- * abort-error.js
- *
- * AbortError interface for cancelled requests
- */
-
-/**
- * Create AbortError instance
- *
- * @param String message Error message for human
- * @return AbortError
- */
-function AbortError(message) {
- Error.call(this, message);
-
- this.type = 'aborted';
- this.message = message;
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-AbortError.prototype = Object.create(Error.prototype);
-AbortError.prototype.constructor = AbortError;
-AbortError.prototype.name = 'AbortError';
-
-// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
-const PassThrough$1 = Stream.PassThrough;
-const resolve_url = Url.resolve;
-
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-function fetch(url, opts) {
-
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
- }
-
- Body.Promise = fetch.Promise;
-
- // wrap http.request into fetch
- return new fetch.Promise(function (resolve, reject) {
- // build request object
- const request = new Request(url, opts);
- const options = getNodeRequestOptions(request);
-
- const send = (options.protocol === 'https:' ? https : http).request;
- const signal = request.signal;
-
- let response = null;
-
- const abort = function abort() {
- let error = new AbortError('The user aborted a request.');
- reject(error);
- if (request.body && request.body instanceof Stream.Readable) {
- request.body.destroy(error);
- }
- if (!response || !response.body) return;
- response.body.emit('error', error);
- };
-
- if (signal && signal.aborted) {
- abort();
- return;
- }
-
- const abortAndFinalize = function abortAndFinalize() {
- abort();
- finalize();
- };
-
- // send request
- const req = send(options);
- let reqTimeout;
-
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize);
- }
-
- function finalize() {
- req.abort();
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- clearTimeout(reqTimeout);
- }
-
- if (request.timeout) {
- req.once('socket', function (socket) {
- reqTimeout = setTimeout(function () {
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
- finalize();
- }, request.timeout);
- });
- }
-
- req.on('error', function (err) {
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
- finalize();
- });
-
- req.on('response', function (res) {
- clearTimeout(reqTimeout);
-
- const headers = createHeadersLenient(res.headers);
-
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location');
-
- // HTTP fetch step 5.3
- const locationURL = location === null ? null : resolve_url(request.url, location);
-
- // HTTP fetch step 5.5
- switch (request.redirect) {
- case 'error':
- reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
- finalize();
- return;
- case 'manual':
- // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL);
- } catch (err) {
- // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
- reject(err);
- }
- }
- break;
- case 'follow':
- // HTTP-redirect fetch step 2
- if (locationURL === null) {
- break;
- }
-
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout
- };
-
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
- reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
- requestOpts.method = 'GET';
- requestOpts.body = undefined;
- requestOpts.headers.delete('content-length');
- }
-
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)));
- finalize();
- return;
- }
- }
-
- // prepare response
- res.once('end', function () {
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- });
- let body = res.pipe(new PassThrough$1());
-
- const response_options = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter
- };
-
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding');
-
- // HTTP-network fetch step 12.1.1.4: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // For Node v6+
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- };
-
- // for gzip
- if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // for deflate
- if (codings == 'deflate' || codings == 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough$1());
- raw.once('data', function (chunk) {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
- } else {
- body = body.pipe(zlib.createInflateRaw());
- }
- response = new Response(body, response_options);
- resolve(response);
- });
- return;
- }
-
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // otherwise, use response as-is
- response = new Response(body, response_options);
- resolve(response);
- });
-
- writeToStream(req, request);
- });
-}
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = function (code) {
- return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
-};
-
-// expose Promise
-fetch.Promise = global.Promise;
-
-module.exports = exports = fetch;
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.default = exports;
-exports.Headers = Headers;
-exports.Request = Request;
-exports.Response = Response;
-exports.FetchError = FetchError;
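The deflate branch above peeks at the first byte of the body to tell zlib-wrapped deflate from the raw deflate streams produced by some old IIS and Apache servers. A standalone sketch of that check, assuming the whole body has already been buffered into `encoded` rather than streamed:

    const zlib = require('zlib');

    function inflateLenient(encoded, callback) {
      // a zlib-wrapped stream starts with a CMF byte whose low nibble is 8 (deflate);
      // anything else is treated as a raw deflate stream
      const isZlibWrapped = (encoded[0] & 0x0f) === 0x08;
      const inflate = isZlibWrapped ? zlib.inflate : zlib.inflateRaw;
      inflate(encoded, callback);
    }

    // usage: inflateLenient(bodyBuffer, (err, decoded) => { /* ... */ });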
diff --git a/scripts/metrics/node_modules/node-fetch/lib/index.mjs b/scripts/metrics/node_modules/node-fetch/lib/index.mjs
deleted file mode 100644
index dca525658b4..00000000000
--- a/scripts/metrics/node_modules/node-fetch/lib/index.mjs
+++ /dev/null
@@ -1,1629 +0,0 @@
-import Stream from 'stream';
-import http from 'http';
-import Url from 'url';
-import https from 'https';
-import zlib from 'zlib';
-
-// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js
-
-// fix for "Readable" isn't a named export issue
-const Readable = Stream.Readable;
-
-const BUFFER = Symbol('buffer');
-const TYPE = Symbol('type');
-
-class Blob {
- constructor() {
- this[TYPE] = '';
-
- const blobParts = arguments[0];
- const options = arguments[1];
-
- const buffers = [];
- let size = 0;
-
- if (blobParts) {
- const a = blobParts;
- const length = Number(a.length);
- for (let i = 0; i < length; i++) {
- const element = a[i];
- let buffer;
- if (element instanceof Buffer) {
- buffer = element;
- } else if (ArrayBuffer.isView(element)) {
- buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);
- } else if (element instanceof ArrayBuffer) {
- buffer = Buffer.from(element);
- } else if (element instanceof Blob) {
- buffer = element[BUFFER];
- } else {
- buffer = Buffer.from(typeof element === 'string' ? element : String(element));
- }
- size += buffer.length;
- buffers.push(buffer);
- }
- }
-
- this[BUFFER] = Buffer.concat(buffers);
-
- let type = options && options.type !== undefined && String(options.type).toLowerCase();
- if (type && !/[^\u0020-\u007E]/.test(type)) {
- this[TYPE] = type;
- }
- }
- get size() {
- return this[BUFFER].length;
- }
- get type() {
- return this[TYPE];
- }
- text() {
- return Promise.resolve(this[BUFFER].toString());
- }
- arrayBuffer() {
- const buf = this[BUFFER];
- const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- return Promise.resolve(ab);
- }
- stream() {
- const readable = new Readable();
- readable._read = function () {};
- readable.push(this[BUFFER]);
- readable.push(null);
- return readable;
- }
- toString() {
- return '[object Blob]';
- }
- slice() {
- const size = this.size;
-
- const start = arguments[0];
- const end = arguments[1];
- let relativeStart, relativeEnd;
- if (start === undefined) {
- relativeStart = 0;
- } else if (start < 0) {
- relativeStart = Math.max(size + start, 0);
- } else {
- relativeStart = Math.min(start, size);
- }
- if (end === undefined) {
- relativeEnd = size;
- } else if (end < 0) {
- relativeEnd = Math.max(size + end, 0);
- } else {
- relativeEnd = Math.min(end, size);
- }
- const span = Math.max(relativeEnd - relativeStart, 0);
-
- const buffer = this[BUFFER];
- const slicedBuffer = buffer.slice(relativeStart, relativeStart + span);
- const blob = new Blob([], { type: arguments[2] });
- blob[BUFFER] = slicedBuffer;
- return blob;
- }
-}
-
-Object.defineProperties(Blob.prototype, {
- size: { enumerable: true },
- type: { enumerable: true },
- slice: { enumerable: true }
-});
-
-Object.defineProperty(Blob.prototype, Symbol.toStringTag, {
- value: 'Blob',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * fetch-error.js
- *
- * FetchError interface for operational errors
- */
-
-/**
- * Create FetchError instance
- *
- * @param String message Error message for human
- * @param String type Error type for machine
- * @param String systemError For Node.js system error
- * @return FetchError
- */
-function FetchError(message, type, systemError) {
- Error.call(this, message);
-
- this.message = message;
- this.type = type;
-
- // when err.type is `system`, err.code contains system error code
- if (systemError) {
- this.code = this.errno = systemError.code;
- }
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-FetchError.prototype = Object.create(Error.prototype);
-FetchError.prototype.constructor = FetchError;
-FetchError.prototype.name = 'FetchError';
-
-let convert;
-try {
- convert = require('encoding').convert;
-} catch (e) {}
-
-const INTERNALS = Symbol('Body internals');
-
-// fix an issue where "PassThrough" isn't a named export for node <10
-const PassThrough = Stream.PassThrough;
-
-/**
- * Body mixin
- *
- * Ref: https://fetch.spec.whatwg.org/#body
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-function Body(body) {
- var _this = this;
-
- var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},
- _ref$size = _ref.size;
-
- let size = _ref$size === undefined ? 0 : _ref$size;
- var _ref$timeout = _ref.timeout;
- let timeout = _ref$timeout === undefined ? 0 : _ref$timeout;
-
- if (body == null) {
- // body is undefined or null
- body = null;
- } else if (isURLSearchParams(body)) {
- // body is a URLSearchParams
- body = Buffer.from(body.toString());
- } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {
- // body is ArrayBuffer
- body = Buffer.from(body);
- } else if (ArrayBuffer.isView(body)) {
- // body is ArrayBufferView
- body = Buffer.from(body.buffer, body.byteOffset, body.byteLength);
- } else if (body instanceof Stream) ; else {
- // none of the above
- // coerce to string then buffer
- body = Buffer.from(String(body));
- }
- this[INTERNALS] = {
- body,
- disturbed: false,
- error: null
- };
- this.size = size;
- this.timeout = timeout;
-
- if (body instanceof Stream) {
- body.on('error', function (err) {
- const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);
- _this[INTERNALS].error = error;
- });
- }
-}
-
-Body.prototype = {
- get body() {
- return this[INTERNALS].body;
- },
-
- get bodyUsed() {
- return this[INTERNALS].disturbed;
- },
-
- /**
- * Decode response as ArrayBuffer
- *
- * @return Promise
- */
- arrayBuffer() {
- return consumeBody.call(this).then(function (buf) {
- return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);
- });
- },
-
- /**
- * Return raw response as Blob
- *
- * @return Promise
- */
- blob() {
- let ct = this.headers && this.headers.get('content-type') || '';
- return consumeBody.call(this).then(function (buf) {
- return Object.assign(
- // Prevent copying
- new Blob([], {
- type: ct.toLowerCase()
- }), {
- [BUFFER]: buf
- });
- });
- },
-
- /**
- * Decode response as json
- *
- * @return Promise
- */
- json() {
- var _this2 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- try {
- return JSON.parse(buffer.toString());
- } catch (err) {
- return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));
- }
- });
- },
-
- /**
- * Decode response as text
- *
- * @return Promise
- */
- text() {
- return consumeBody.call(this).then(function (buffer) {
- return buffer.toString();
- });
- },
-
- /**
- * Decode response as buffer (non-spec api)
- *
- * @return Promise
- */
- buffer() {
- return consumeBody.call(this);
- },
-
- /**
- * Decode response as text, while automatically detecting the encoding and
- * trying to decode to UTF-8 (non-spec api)
- *
- * @return Promise
- */
- textConverted() {
- var _this3 = this;
-
- return consumeBody.call(this).then(function (buffer) {
- return convertBody(buffer, _this3.headers);
- });
- }
-};
-
-// In browsers, all properties are enumerable.
-Object.defineProperties(Body.prototype, {
- body: { enumerable: true },
- bodyUsed: { enumerable: true },
- arrayBuffer: { enumerable: true },
- blob: { enumerable: true },
- json: { enumerable: true },
- text: { enumerable: true }
-});
-
-Body.mixIn = function (proto) {
- for (const name of Object.getOwnPropertyNames(Body.prototype)) {
- // istanbul ignore else: future proof
- if (!(name in proto)) {
- const desc = Object.getOwnPropertyDescriptor(Body.prototype, name);
- Object.defineProperty(proto, name, desc);
- }
- }
-};
-
-/**
- * Consume and convert an entire Body to a Buffer.
- *
- * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body
- *
- * @return Promise
- */
-function consumeBody() {
- var _this4 = this;
-
- if (this[INTERNALS].disturbed) {
- return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));
- }
-
- this[INTERNALS].disturbed = true;
-
- if (this[INTERNALS].error) {
- return Body.Promise.reject(this[INTERNALS].error);
- }
-
- let body = this.body;
-
- // body is null
- if (body === null) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is blob
- if (isBlob(body)) {
- body = body.stream();
- }
-
- // body is buffer
- if (Buffer.isBuffer(body)) {
- return Body.Promise.resolve(body);
- }
-
- // istanbul ignore if: should never happen
- if (!(body instanceof Stream)) {
- return Body.Promise.resolve(Buffer.alloc(0));
- }
-
- // body is stream
- // get ready to actually consume the body
- let accum = [];
- let accumBytes = 0;
- let abort = false;
-
- return new Body.Promise(function (resolve, reject) {
- let resTimeout;
-
- // allow timeout on slow response body
- if (_this4.timeout) {
- resTimeout = setTimeout(function () {
- abort = true;
- reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));
- }, _this4.timeout);
- }
-
- // handle stream errors
- body.on('error', function (err) {
- if (err.name === 'AbortError') {
- // if the request was aborted, reject with this Error
- abort = true;
- reject(err);
- } else {
- // other errors, such as incorrect content-encoding
- reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
-
- body.on('data', function (chunk) {
- if (abort || chunk === null) {
- return;
- }
-
- if (_this4.size && accumBytes + chunk.length > _this4.size) {
- abort = true;
- reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));
- return;
- }
-
- accumBytes += chunk.length;
- accum.push(chunk);
- });
-
- body.on('end', function () {
- if (abort) {
- return;
- }
-
- clearTimeout(resTimeout);
-
- try {
- resolve(Buffer.concat(accum, accumBytes));
- } catch (err) {
- // handle streams that have accumulated too much data (issue #414)
- reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));
- }
- });
- });
-}
-
-/**
- * Detect buffer encoding and convert to target encoding
- * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding
- *
- * @param Buffer buffer Incoming buffer
- * @param String encoding Target encoding
- * @return String
- */
-function convertBody(buffer, headers) {
- if (typeof convert !== 'function') {
- throw new Error('The package `encoding` must be installed to use the textConverted() function');
- }
-
- const ct = headers.get('content-type');
- let charset = 'utf-8';
- let res, str;
-
- // header
- if (ct) {
- res = /charset=([^;]*)/i.exec(ct);
- }
-
- // no charset in content type, peek at response body for at most 1024 bytes
- str = buffer.slice(0, 1024).toString();
-
- // html5
- if (!res && str) {
- res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;
-
- this[MAP] = Object.create(null);
-
- if (init instanceof Headers) {
- const rawHeaders = init.raw();
- const headerNames = Object.keys(rawHeaders);
-
- for (const headerName of headerNames) {
- for (const value of rawHeaders[headerName]) {
- this.append(headerName, value);
- }
- }
-
- return;
- }
-
- // We don't worry about converting prop to ByteString here as append()
- // will handle it.
- if (init == null) ; else if (typeof init === 'object') {
- const method = init[Symbol.iterator];
- if (method != null) {
- if (typeof method !== 'function') {
- throw new TypeError('Header pairs must be iterable');
- }
-
- // sequence<sequence<ByteString>>
- // Note: per spec we have to first exhaust the lists then process them
- const pairs = [];
- for (const pair of init) {
- if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {
- throw new TypeError('Each header pair must be iterable');
- }
- pairs.push(Array.from(pair));
- }
-
- for (const pair of pairs) {
- if (pair.length !== 2) {
- throw new TypeError('Each header pair must be a name/value tuple');
- }
- this.append(pair[0], pair[1]);
- }
- } else {
- // record
- for (const key of Object.keys(init)) {
- const value = init[key];
- this.append(key, value);
- }
- }
- } else {
- throw new TypeError('Provided initializer must be an object');
- }
- }
-
- /**
- * Return combined header value given name
- *
- * @param String name Header name
- * @return Mixed
- */
- get(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key === undefined) {
- return null;
- }
-
- return this[MAP][key].join(', ');
- }
-
- /**
- * Iterate over all headers
- *
- * @param Function callback Executed for each item with parameters (value, name, thisArg)
- * @param Boolean thisArg `this` context for callback function
- * @return Void
- */
- forEach(callback) {
- let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;
-
- let pairs = getHeaders(this);
- let i = 0;
- while (i < pairs.length) {
- var _pairs$i = pairs[i];
- const name = _pairs$i[0],
- value = _pairs$i[1];
-
- callback.call(thisArg, value, name, this);
- pairs = getHeaders(this);
- i++;
- }
- }
-
- /**
- * Overwrite header values given name
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- set(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- this[MAP][key !== undefined ? key : name] = [value];
- }
-
- /**
- * Append a value onto existing header
- *
- * @param String name Header name
- * @param String value Header value
- * @return Void
- */
- append(name, value) {
- name = `${name}`;
- value = `${value}`;
- validateName(name);
- validateValue(value);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- this[MAP][key].push(value);
- } else {
- this[MAP][name] = [value];
- }
- }
-
- /**
- * Check for header name existence
- *
- * @param String name Header name
- * @return Boolean
- */
- has(name) {
- name = `${name}`;
- validateName(name);
- return find(this[MAP], name) !== undefined;
- }
-
- /**
- * Delete all header values given name
- *
- * @param String name Header name
- * @return Void
- */
- delete(name) {
- name = `${name}`;
- validateName(name);
- const key = find(this[MAP], name);
- if (key !== undefined) {
- delete this[MAP][key];
- }
- }
-
- /**
- * Return raw headers (non-spec api)
- *
- * @return Object
- */
- raw() {
- return this[MAP];
- }
-
- /**
- * Get an iterator on keys.
- *
- * @return Iterator
- */
- keys() {
- return createHeadersIterator(this, 'key');
- }
-
- /**
- * Get an iterator on values.
- *
- * @return Iterator
- */
- values() {
- return createHeadersIterator(this, 'value');
- }
-
- /**
- * Get an iterator on entries.
- *
- * This is the default iterator of the Headers object.
- *
- * @return Iterator
- */
- [Symbol.iterator]() {
- return createHeadersIterator(this, 'key+value');
- }
-}
-Headers.prototype.entries = Headers.prototype[Symbol.iterator];
-
-Object.defineProperty(Headers.prototype, Symbol.toStringTag, {
- value: 'Headers',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Headers.prototype, {
- get: { enumerable: true },
- forEach: { enumerable: true },
- set: { enumerable: true },
- append: { enumerable: true },
- has: { enumerable: true },
- delete: { enumerable: true },
- keys: { enumerable: true },
- values: { enumerable: true },
- entries: { enumerable: true }
-});
-
-function getHeaders(headers) {
- let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';
-
- const keys = Object.keys(headers[MAP]).sort();
- return keys.map(kind === 'key' ? function (k) {
- return k.toLowerCase();
- } : kind === 'value' ? function (k) {
- return headers[MAP][k].join(', ');
- } : function (k) {
- return [k.toLowerCase(), headers[MAP][k].join(', ')];
- });
-}
-
-const INTERNAL = Symbol('internal');
-
-function createHeadersIterator(target, kind) {
- const iterator = Object.create(HeadersIteratorPrototype);
- iterator[INTERNAL] = {
- target,
- kind,
- index: 0
- };
- return iterator;
-}
-
-const HeadersIteratorPrototype = Object.setPrototypeOf({
- next() {
- // istanbul ignore if
- if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
- throw new TypeError('Value of `this` is not a HeadersIterator');
- }
-
- var _INTERNAL = this[INTERNAL];
- const target = _INTERNAL.target,
- kind = _INTERNAL.kind,
- index = _INTERNAL.index;
-
- const values = getHeaders(target, kind);
- const len = values.length;
- if (index >= len) {
- return {
- value: undefined,
- done: true
- };
- }
-
- this[INTERNAL].index = index + 1;
-
- return {
- value: values[index],
- done: false
- };
- }
-}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));
-
-Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
- value: 'HeadersIterator',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-/**
- * Export the Headers object in a form that Node.js can consume.
- *
- * @param Headers headers
- * @return Object
- */
-function exportNodeCompatibleHeaders(headers) {
- const obj = Object.assign({ __proto__: null }, headers[MAP]);
-
- // http.request() only supports string as Host header. This hack makes
- // specifying custom Host header possible.
- const hostHeaderKey = find(headers[MAP], 'Host');
- if (hostHeaderKey !== undefined) {
- obj[hostHeaderKey] = obj[hostHeaderKey][0];
- }
-
- return obj;
-}
-
-/**
- * Create a Headers object from an object of headers, ignoring those that do
- * not conform to HTTP grammar productions.
- *
- * @param Object obj Object of headers
- * @return Headers
- */
-function createHeadersLenient(obj) {
- const headers = new Headers();
- for (const name of Object.keys(obj)) {
- if (invalidTokenRegex.test(name)) {
- continue;
- }
- if (Array.isArray(obj[name])) {
- for (const val of obj[name]) {
- if (invalidHeaderCharRegex.test(val)) {
- continue;
- }
- if (headers[MAP][name] === undefined) {
- headers[MAP][name] = [val];
- } else {
- headers[MAP][name].push(val);
- }
- }
- } else if (!invalidHeaderCharRegex.test(obj[name])) {
- headers[MAP][name] = [obj[name]];
- }
- }
- return headers;
-}
-
-const INTERNALS$1 = Symbol('Response internals');
-
-// fix an issue where "STATUS_CODES" aren't a named export for node <10
-const STATUS_CODES = http.STATUS_CODES;
-
-/**
- * Response class
- *
- * @param Stream body Readable stream
- * @param Object opts Response options
- * @return Void
- */
-class Response {
- constructor() {
- let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;
- let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- Body.call(this, body, opts);
-
- const status = opts.status || 200;
- const headers = new Headers(opts.headers);
-
- if (body != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(body);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- this[INTERNALS$1] = {
- url: opts.url,
- status,
- statusText: opts.statusText || STATUS_CODES[status],
- headers,
- counter: opts.counter
- };
- }
-
- get url() {
- return this[INTERNALS$1].url;
- }
-
- get status() {
- return this[INTERNALS$1].status;
- }
-
- /**
- * Convenience property representing if the request ended normally
- */
- get ok() {
- return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;
- }
-
- get redirected() {
- return this[INTERNALS$1].counter > 0;
- }
-
- get statusText() {
- return this[INTERNALS$1].statusText;
- }
-
- get headers() {
- return this[INTERNALS$1].headers;
- }
-
- /**
- * Clone this response
- *
- * @return Response
- */
- clone() {
- return new Response(clone(this), {
- url: this.url,
- status: this.status,
- statusText: this.statusText,
- headers: this.headers,
- ok: this.ok,
- redirected: this.redirected
- });
- }
-}
-
-Body.mixIn(Response.prototype);
-
-Object.defineProperties(Response.prototype, {
- url: { enumerable: true },
- status: { enumerable: true },
- ok: { enumerable: true },
- redirected: { enumerable: true },
- statusText: { enumerable: true },
- headers: { enumerable: true },
- clone: { enumerable: true }
-});
-
-Object.defineProperty(Response.prototype, Symbol.toStringTag, {
- value: 'Response',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-const INTERNALS$2 = Symbol('Request internals');
-
-// fix an issue where "format", "parse" aren't a named export for node <10
-const parse_url = Url.parse;
-const format_url = Url.format;
-
-const streamDestructionSupported = 'destroy' in Stream.Readable.prototype;
-
-/**
- * Check if a value is an instance of Request.
- *
- * @param Mixed input
- * @return Boolean
- */
-function isRequest(input) {
- return typeof input === 'object' && typeof input[INTERNALS$2] === 'object';
-}
-
-function isAbortSignal(signal) {
- const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);
- return !!(proto && proto.constructor.name === 'AbortSignal');
-}
-
-/**
- * Request class
- *
- * @param Mixed input Url or Request instance
- * @param Object init Custom options
- * @return Void
- */
-class Request {
- constructor(input) {
- let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-
- let parsedURL;
-
- // normalize input
- if (!isRequest(input)) {
- if (input && input.href) {
- // in order to support Node.js' Url objects; though WHATWG's URL objects
- // will fall into this branch also (since their `toString()` will return
- // `href` property anyway)
- parsedURL = parse_url(input.href);
- } else {
- // coerce input to a string before attempting to parse
- parsedURL = parse_url(`${input}`);
- }
- input = {};
- } else {
- parsedURL = parse_url(input.url);
- }
-
- let method = init.method || input.method || 'GET';
- method = method.toUpperCase();
-
- if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {
- throw new TypeError('Request with GET/HEAD method cannot have body');
- }
-
- let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;
-
- Body.call(this, inputBody, {
- timeout: init.timeout || input.timeout || 0,
- size: init.size || input.size || 0
- });
-
- const headers = new Headers(init.headers || input.headers || {});
-
- if (inputBody != null && !headers.has('Content-Type')) {
- const contentType = extractContentType(inputBody);
- if (contentType) {
- headers.append('Content-Type', contentType);
- }
- }
-
- let signal = isRequest(input) ? input.signal : null;
- if ('signal' in init) signal = init.signal;
-
- if (signal != null && !isAbortSignal(signal)) {
- throw new TypeError('Expected signal to be an instanceof AbortSignal');
- }
-
- this[INTERNALS$2] = {
- method,
- redirect: init.redirect || input.redirect || 'follow',
- headers,
- parsedURL,
- signal
- };
-
- // node-fetch-only options
- this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;
- this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true;
- this.counter = init.counter || input.counter || 0;
- this.agent = init.agent || input.agent;
- }
-
- get method() {
- return this[INTERNALS$2].method;
- }
-
- get url() {
- return format_url(this[INTERNALS$2].parsedURL);
- }
-
- get headers() {
- return this[INTERNALS$2].headers;
- }
-
- get redirect() {
- return this[INTERNALS$2].redirect;
- }
-
- get signal() {
- return this[INTERNALS$2].signal;
- }
-
- /**
- * Clone this request
- *
- * @return Request
- */
- clone() {
- return new Request(this);
- }
-}
-
-Body.mixIn(Request.prototype);
-
-Object.defineProperty(Request.prototype, Symbol.toStringTag, {
- value: 'Request',
- writable: false,
- enumerable: false,
- configurable: true
-});
-
-Object.defineProperties(Request.prototype, {
- method: { enumerable: true },
- url: { enumerable: true },
- headers: { enumerable: true },
- redirect: { enumerable: true },
- clone: { enumerable: true },
- signal: { enumerable: true }
-});
-
-/**
- * Convert a Request to Node.js http request options.
- *
- * @param Request A Request instance
- * @return Object The options object to be passed to http.request
- */
-function getNodeRequestOptions(request) {
- const parsedURL = request[INTERNALS$2].parsedURL;
- const headers = new Headers(request[INTERNALS$2].headers);
-
- // fetch step 1.3
- if (!headers.has('Accept')) {
- headers.set('Accept', '*/*');
- }
-
- // Basic fetch
- if (!parsedURL.protocol || !parsedURL.hostname) {
- throw new TypeError('Only absolute URLs are supported');
- }
-
- if (!/^https?:$/.test(parsedURL.protocol)) {
- throw new TypeError('Only HTTP(S) protocols are supported');
- }
-
- if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {
- throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');
- }
-
- // HTTP-network-or-cache fetch steps 2.4-2.7
- let contentLengthValue = null;
- if (request.body == null && /^(POST|PUT)$/i.test(request.method)) {
- contentLengthValue = '0';
- }
- if (request.body != null) {
- const totalBytes = getTotalBytes(request);
- if (typeof totalBytes === 'number') {
- contentLengthValue = String(totalBytes);
- }
- }
- if (contentLengthValue) {
- headers.set('Content-Length', contentLengthValue);
- }
-
- // HTTP-network-or-cache fetch step 2.11
- if (!headers.has('User-Agent')) {
- headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');
- }
-
- // HTTP-network-or-cache fetch step 2.15
- if (request.compress && !headers.has('Accept-Encoding')) {
- headers.set('Accept-Encoding', 'gzip,deflate');
- }
-
- if (!headers.has('Connection') && !request.agent) {
- headers.set('Connection', 'close');
- }
-
- // HTTP-network fetch step 4.2
- // chunked encoding is handled by Node.js
-
- return Object.assign({}, parsedURL, {
- method: request.method,
- headers: exportNodeCompatibleHeaders(headers),
- agent: request.agent
- });
-}
-
-/**
- * abort-error.js
- *
- * AbortError interface for cancelled requests
- */
-
-/**
- * Create AbortError instance
- *
- * @param String message Error message for human
- * @return AbortError
- */
-function AbortError(message) {
- Error.call(this, message);
-
- this.type = 'aborted';
- this.message = message;
-
- // hide custom error implementation details from end-users
- Error.captureStackTrace(this, this.constructor);
-}
-
-AbortError.prototype = Object.create(Error.prototype);
-AbortError.prototype.constructor = AbortError;
-AbortError.prototype.name = 'AbortError';
-
-// fix an issue where "PassThrough", "resolve" aren't a named export for node <10
-const PassThrough$1 = Stream.PassThrough;
-const resolve_url = Url.resolve;
-
-/**
- * Fetch function
- *
- * @param Mixed url Absolute url or Request instance
- * @param Object opts Fetch options
- * @return Promise
- */
-function fetch(url, opts) {
-
- // allow custom promise
- if (!fetch.Promise) {
- throw new Error('native promise missing, set fetch.Promise to your favorite alternative');
- }
-
- Body.Promise = fetch.Promise;
-
- // wrap http.request into fetch
- return new fetch.Promise(function (resolve, reject) {
- // build request object
- const request = new Request(url, opts);
- const options = getNodeRequestOptions(request);
-
- const send = (options.protocol === 'https:' ? https : http).request;
- const signal = request.signal;
-
- let response = null;
-
- const abort = function abort() {
- let error = new AbortError('The user aborted a request.');
- reject(error);
- if (request.body && request.body instanceof Stream.Readable) {
- request.body.destroy(error);
- }
- if (!response || !response.body) return;
- response.body.emit('error', error);
- };
-
- if (signal && signal.aborted) {
- abort();
- return;
- }
-
- const abortAndFinalize = function abortAndFinalize() {
- abort();
- finalize();
- };
-
- // send request
- const req = send(options);
- let reqTimeout;
-
- if (signal) {
- signal.addEventListener('abort', abortAndFinalize);
- }
-
- function finalize() {
- req.abort();
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- clearTimeout(reqTimeout);
- }
-
- if (request.timeout) {
- req.once('socket', function (socket) {
- reqTimeout = setTimeout(function () {
- reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));
- finalize();
- }, request.timeout);
- });
- }
-
- req.on('error', function (err) {
- reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));
- finalize();
- });
-
- req.on('response', function (res) {
- clearTimeout(reqTimeout);
-
- const headers = createHeadersLenient(res.headers);
-
- // HTTP fetch step 5
- if (fetch.isRedirect(res.statusCode)) {
- // HTTP fetch step 5.2
- const location = headers.get('Location');
-
- // HTTP fetch step 5.3
- const locationURL = location === null ? null : resolve_url(request.url, location);
-
- // HTTP fetch step 5.5
- switch (request.redirect) {
- case 'error':
- reject(new FetchError(`redirect mode is set to error: ${request.url}`, 'no-redirect'));
- finalize();
- return;
- case 'manual':
- // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.
- if (locationURL !== null) {
- // handle corrupted header
- try {
- headers.set('Location', locationURL);
- } catch (err) {
- // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request
- reject(err);
- }
- }
- break;
- case 'follow':
- // HTTP-redirect fetch step 2
- if (locationURL === null) {
- break;
- }
-
- // HTTP-redirect fetch step 5
- if (request.counter >= request.follow) {
- reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 6 (counter increment)
- // Create a new Request object.
- const requestOpts = {
- headers: new Headers(request.headers),
- follow: request.follow,
- counter: request.counter + 1,
- agent: request.agent,
- compress: request.compress,
- method: request.method,
- body: request.body,
- signal: request.signal,
- timeout: request.timeout
- };
-
- // HTTP-redirect fetch step 9
- if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {
- reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));
- finalize();
- return;
- }
-
- // HTTP-redirect fetch step 11
- if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {
- requestOpts.method = 'GET';
- requestOpts.body = undefined;
- requestOpts.headers.delete('content-length');
- }
-
- // HTTP-redirect fetch step 15
- resolve(fetch(new Request(locationURL, requestOpts)));
- finalize();
- return;
- }
- }
-
- // prepare response
- res.once('end', function () {
- if (signal) signal.removeEventListener('abort', abortAndFinalize);
- });
- let body = res.pipe(new PassThrough$1());
-
- const response_options = {
- url: request.url,
- status: res.statusCode,
- statusText: res.statusMessage,
- headers: headers,
- size: request.size,
- timeout: request.timeout,
- counter: request.counter
- };
-
- // HTTP-network fetch step 12.1.1.3
- const codings = headers.get('Content-Encoding');
-
- // HTTP-network fetch step 12.1.1.4: handle content codings
-
- // in following scenarios we ignore compression support
- // 1. compression support is disabled
- // 2. HEAD request
- // 3. no Content-Encoding header
- // 4. no content response (204)
- // 5. content not modified response (304)
- if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // For Node v6+
- // Be less strict when decoding compressed responses, since sometimes
- // servers send slightly invalid responses that are still accepted
- // by common browsers.
- // Always using Z_SYNC_FLUSH is what cURL does.
- const zlibOptions = {
- flush: zlib.Z_SYNC_FLUSH,
- finishFlush: zlib.Z_SYNC_FLUSH
- };
-
- // for gzip
- if (codings == 'gzip' || codings == 'x-gzip') {
- body = body.pipe(zlib.createGunzip(zlibOptions));
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // for deflate
- if (codings == 'deflate' || codings == 'x-deflate') {
- // handle the infamous raw deflate response from old servers
- // a hack for old IIS and Apache servers
- const raw = res.pipe(new PassThrough$1());
- raw.once('data', function (chunk) {
- // see http://stackoverflow.com/questions/37519828
- if ((chunk[0] & 0x0F) === 0x08) {
- body = body.pipe(zlib.createInflate());
- } else {
- body = body.pipe(zlib.createInflateRaw());
- }
- response = new Response(body, response_options);
- resolve(response);
- });
- return;
- }
-
- // for br
- if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {
- body = body.pipe(zlib.createBrotliDecompress());
- response = new Response(body, response_options);
- resolve(response);
- return;
- }
-
- // otherwise, use response as-is
- response = new Response(body, response_options);
- resolve(response);
- });
-
- writeToStream(req, request);
- });
-}
-/**
- * Redirect code matching
- *
- * @param Number code Status code
- * @return Boolean
- */
-fetch.isRedirect = function (code) {
- return code === 301 || code === 302 || code === 303 || code === 307 || code === 308;
-};
-
-// expose Promise
-fetch.Promise = global.Promise;
-
-export default fetch;
-export { Headers, Request, Response, FetchError };
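The ES module build above mirrors the CommonJS build, exposed via `export default fetch` plus named exports. One behavior worth noting from the `Headers` implementation: names are matched case-insensitively, `append()` accumulates values, and `get()` joins them with `', '`. A short sketch of those semantics (header names and values are made up for the example):

```js
// Sketch of the Headers semantics implemented above; values are examples.
import { Headers } from 'node-fetch';

const headers = new Headers({ Accept: 'application/json' });
headers.append('X-Trace', 'a');
headers.append('x-trace', 'b');        // same header, different case

console.log(headers.get('accept'));    // 'application/json'
console.log(headers.get('X-Trace'));   // 'a, b' — values joined with ', '
console.log(headers.has('x-trace'));   // true

// The default iterator yields sorted, lowercased [name, value] pairs.
for (const [name, value] of headers) {
  console.log(name, value);
}
```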
diff --git a/scripts/metrics/node_modules/node-fetch/package.json b/scripts/metrics/node_modules/node-fetch/package.json
deleted file mode 100644
index e93129c801f..00000000000
--- a/scripts/metrics/node_modules/node-fetch/package.json
+++ /dev/null
@@ -1,94 +0,0 @@
-{
- "_from": "node-fetch",
- "_id": "node-fetch@2.5.0",
- "_inBundle": false,
- "_integrity": "sha512-YuZKluhWGJwCcUu4RlZstdAxr8bFfOVHakc1mplwHkk8J+tqM1Y5yraYvIUpeX8aY7+crCwiELJq7Vl0o0LWXw==",
- "_location": "/node-fetch",
- "_phantomChildren": {},
- "_requested": {
- "type": "tag",
- "registry": true,
- "raw": "node-fetch",
- "name": "node-fetch",
- "escapedName": "node-fetch",
- "rawSpec": "",
- "saveSpec": null,
- "fetchSpec": "latest"
- },
- "_requiredBy": [
- "#USER",
- "/"
- ],
- "_resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.5.0.tgz",
- "_shasum": "8028c49fc1191bba56a07adc6e2a954644a48501",
- "_spec": "node-fetch",
- "_where": "/Users/zachary.butler/Work/auto-buildkite-pipelines/scripts/metrics",
- "author": {
- "name": "David Frank"
- },
- "browser": "./browser.js",
- "bugs": {
- "url": "https://github.com/bitinn/node-fetch/issues"
- },
- "bundleDependencies": false,
- "dependencies": {},
- "deprecated": false,
- "description": "A light-weight module that brings window.fetch to node.js",
- "devDependencies": {
- "@ungap/url-search-params": "^0.1.2",
- "abort-controller": "^1.1.0",
- "abortcontroller-polyfill": "^1.3.0",
- "babel-core": "^6.26.3",
- "babel-plugin-istanbul": "^4.1.6",
- "babel-preset-env": "^1.6.1",
- "babel-register": "^6.16.3",
- "chai": "^3.5.0",
- "chai-as-promised": "^7.1.1",
- "chai-iterator": "^1.1.1",
- "chai-string": "~1.3.0",
- "codecov": "^3.3.0",
- "cross-env": "^5.2.0",
- "form-data": "^2.3.3",
- "is-builtin-module": "^1.0.0",
- "mocha": "^5.0.0",
- "nyc": "11.9.0",
- "parted": "^0.1.1",
- "promise": "^8.0.3",
- "resumer": "0.0.0",
- "rollup": "^0.63.4",
- "rollup-plugin-babel": "^3.0.7",
- "string-to-arraybuffer": "^1.0.2",
- "whatwg-url": "^5.0.0"
- },
- "engines": {
- "node": "4.x || >=6.0.0"
- },
- "files": [
- "lib/index.js",
- "lib/index.mjs",
- "lib/index.es.js",
- "browser.js"
- ],
- "homepage": "https://github.com/bitinn/node-fetch",
- "keywords": [
- "fetch",
- "http",
- "promise"
- ],
- "license": "MIT",
- "main": "lib/index",
- "module": "lib/index.mjs",
- "name": "node-fetch",
- "repository": {
- "type": "git",
- "url": "git+https://github.com/bitinn/node-fetch.git"
- },
- "scripts": {
- "build": "cross-env BABEL_ENV=rollup rollup -c",
- "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json",
- "prepare": "npm run build",
- "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js",
- "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js"
- },
- "version": "2.5.0"
-}
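The manifest wires up three entry points: `main` (`lib/index`) for `require()` callers, `module` (`lib/index.mjs`) for ES-module-aware tooling, and `browser` (`./browser.js`) for bundlers. A small sketch of consuming the ESM entry, assuming a Node version with ES module support (newer than the `engines` floor declared above):

```js
// ESM consumers (and bundlers honoring the "module" field) get lib/index.mjs;
// require('node-fetch') would resolve the "main" field to lib/index.js instead.
import fetch, { Headers } from 'node-fetch';

(async () => {
  const res = await fetch('https://registry.npmjs.org/node-fetch/2.5.0', {
    headers: new Headers({ Accept: 'application/json' }),
  });
  console.log(res.status, res.headers.get('content-type'));
})();
```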
diff --git a/scripts/metrics/node_modules/sax/LICENSE b/scripts/metrics/node_modules/sax/LICENSE
deleted file mode 100644
index ccffa082c99..00000000000
--- a/scripts/metrics/node_modules/sax/LICENSE
+++ /dev/null
@@ -1,41 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-====
-
-`String.fromCodePoint` by Mathias Bynens used according to terms of MIT
-License, as follows:
-
- Copyright Mathias Bynens
-
- Permission is hereby granted, free of charge, to any person obtaining
- a copy of this software and associated documentation files (the
- "Software"), to deal in the Software without restriction, including
- without limitation the rights to use, copy, modify, merge, publish,
- distribute, sublicense, and/or sell copies of the Software, and to
- permit persons to whom the Software is furnished to do so, subject to
- the following conditions:
-
- The above copyright notice and this permission notice shall be
- included in all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/scripts/metrics/node_modules/sax/README.md b/scripts/metrics/node_modules/sax/README.md
deleted file mode 100644
index afcd3f3dd65..00000000000
--- a/scripts/metrics/node_modules/sax/README.md
+++ /dev/null
@@ -1,225 +0,0 @@
-# sax js
-
-A sax-style parser for XML and HTML.
-
-Designed with [node](http://nodejs.org/) in mind, but should work fine in
-the browser or other CommonJS implementations.
-
-## What This Is
-
-* A very simple tool to parse through an XML string.
-* A stepping stone to a streaming HTML parser.
-* A handy way to deal with RSS and other mostly-ok-but-kinda-broken XML
- docs.
-
-## What This Is (probably) Not
-
-* An HTML Parser - That's a fine goal, but this isn't it. It's just
- XML.
-* A DOM Builder - You can use it to build an object model out of XML,
- but it doesn't do that out of the box.
-* XSLT - No DOM = no querying.
-* 100% Compliant with (some other SAX implementation) - Most SAX
- implementations are in Java and do a lot more than this does.
-* An XML Validator - It does a little validation when in strict mode, but
- not much.
-* A Schema-Aware XSD Thing - Schemas are an exercise in fetishistic
- masochism.
-* A DTD-aware Thing - Fetching DTDs is a much bigger job.
-
-## Regarding `<!DOCTYPE`s and `<!ENTITY`s
-
-The parser will handle the basic XML entities in text nodes and attribute
-values: `&amp; &lt; &gt; &apos; &quot;`. It's possible to define additional
-entities in XML by putting them in the DTD. This parser doesn't do anything
-with that. If you want to listen to the `doctype` event, fetch the DTDs, and
-read the entities and add them to `parser.ENTITIES`, then be my guest.
-
-## Usage
-
-```javascript
-var sax = require("./lib/sax"),
-  strict = true, // set to false for html-mode
-  parser = sax.parser(strict);
-
-parser.onerror = function (e) {
-  // an error happened.
-};
-parser.ontext = function (t) {
-  // got some text.  t is the string of text.
-};
-parser.onopentag = function (node) {
-  // opened a tag.  node has "name" and "attributes"
-};
-parser.onattribute = function (attr) {
-  // an attribute.  attr has "name" and "value"
-};
-parser.onend = function () {
-  // parser stream is done, and ready to have more stuff written to it.
-};
-
-parser.write('<xml>Hello, <who name="world">world</who>!</xml>').close();
-
-// stream usage
-// takes the same options as the parser
-var saxStream = require("sax").createStream(strict, options)
-saxStream.on("error", function (e) {
- // unhandled errors will throw, since this is a proper node
- // event emitter.
- console.error("error!", e)
- // clear the error
- this._parser.error = null
- this._parser.resume()
-})
-saxStream.on("opentag", function (node) {
- // same object as above
-})
-// pipe is supported, and it's readable/writable
-// same chunks coming in also go out.
-fs.createReadStream("file.xml")
- .pipe(saxStream)
- .pipe(fs.createWriteStream("file-copy.xml"))
-```
-
-
-## Arguments
-
-Pass the following arguments to the parser function. All are optional.
-
-`strict` - Boolean. Whether or not to be a jerk. Default: `false`.
-
-`opt` - Object bag of settings regarding string formatting. All default to `false`.
-
-Settings supported:
-
-* `trim` - Boolean. Whether or not to trim text and comment nodes.
-* `normalize` - Boolean. If true, then turn any whitespace into a single
- space.
-* `lowercase` - Boolean. If true, then lowercase tag names and attribute names
- in loose mode, rather than uppercasing them.
-* `xmlns` - Boolean. If true, then namespaces are supported.
-* `position` - Boolean. If false, then don't track line/col/position.
-* `strictEntities` - Boolean. If true, only parse [predefined XML
- entities](http://www.w3.org/TR/REC-xml/#sec-predefined-ent)
- (`&amp;`, `&apos;`, `&gt;`, `&lt;`, and `&quot;`)
-
-## Methods
-
-`write` - Write bytes onto the stream. You don't have to do this all at
-once. You can keep writing as much as you want.
-
-`close` - Close the stream. Once closed, no more data may be written until
-it is done processing the buffer, which is signaled by the `end` event.
-
-`resume` - To gracefully handle errors, assign a listener to the `error`
-event. Then, when the error is taken care of, you can call `resume` to
-continue parsing. Otherwise, the parser will not continue while in an error
-state.
-
-## Members
-
-At all times, the parser object will have the following members:
-
-`line`, `column`, `position` - Indications of the position in the XML
-document where the parser currently is looking.
-
-`startTagPosition` - Indicates the position where the current tag starts.
-
-`closed` - Boolean indicating whether or not the parser can be written to.
-If it's `true`, then wait for the `ready` event to write again.
-
-`strict` - Boolean indicating whether or not the parser is a jerk.
-
-`opt` - Any options passed into the constructor.
-
-`tag` - The current tag being dealt with.
-
-And a bunch of other stuff that you probably shouldn't touch.
-
-## Events
-
-All events emit with a single argument. To listen to an event, assign a
-function to `on<eventname>`. Functions get executed in the this-context of
-the parser object. The list of supported events are also in the exported
-`EVENTS` array.
-
-When using the stream interface, assign handlers using the EventEmitter
-`on` function in the normal fashion.
-
-`error` - Indication that something bad happened. The error will be hanging
-out on `parser.error`, and must be deleted before parsing can continue. By
-listening to this event, you can keep an eye on that kind of stuff. Note:
-this happens *much* more in strict mode. Argument: instance of `Error`.
-
-`text` - Text node. Argument: string of text.
-
-`doctype` - The `<!DOCTYPE` declaration. Argument: doctype string.
-
-`processinginstruction` - Stuff like `<?xml foo="blerg" ?>`. Argument:
-object with `name` and `body` members. Attributes are not parsed, as
-processing instructions have implementation dependent semantics.
-
-`sgmldeclaration` - Random SGML declarations. Stuff like `<!ENTITY type="foo">`
-would trigger this kind of event. This is a weird thing to support, so it
-might go away at some point. SAX isn't intended to be used to parse SGML,
-after all.
-
-`opentagstart` - Emitted immediately when the tag name is available,
-but before any attributes are encountered. Argument: object with a
-`name` field and an empty `attributes` set. Note that this is the
-same object that will later be emitted in the `opentag` event.
-
-`opentag` - An opening tag. Argument: object with `name` and `attributes`.
-In non-strict mode, tag names are uppercased, unless the `lowercase`
-option is set. If the `xmlns` option is set, then it will contain
-namespace binding information on the `ns` member, and will have a
-`local`, `prefix`, and `uri` member.
-
-`closetag` - A closing tag. In loose mode, tags are auto-closed if their
-parent closes. In strict mode, well-formedness is enforced. Note that
-self-closing tags will have `closeTag` emitted immediately after `openTag`.
-Argument: tag name.
-
-`attribute` - An attribute node. Argument: object with `name` and `value`.
-In non-strict mode, attribute names are uppercased, unless the `lowercase`
-option is set. If the `xmlns` option is set, it will also contains namespace
-information.
-
-`comment` - A comment node. Argument: the string of the comment.
-
-`opencdata` - The opening tag of a `<![CDATA[` block.
-
-`cdata` - The text of a `<![CDATA[` block. Since `<![CDATA[` blocks can get
-quite large, this event may fire multiple times for a single block, if it
-is broken up into multiple write()s. Argument: the string of random character
-data.
-
-`closecdata` - The closing tag (`]]>`) of a `<![CDATA[` block.
-
-`opennamespace` - If the `xmlns` option is set, then this event will signal
-the start of a new namespace binding. Argument: object with `prefix` and
-`uri` members.
-
-`closenamespace` - If the `xmlns` option is set, then this event will signal
-the end of a namespace binding. Argument: object with `prefix` and `uri`
-members.
-
-`end` - Indication that the closed stream has ended.
-
-`ready` - Indication that the stream has reset, and is ready to be written
-to.
-
-`script` - In non-strict mode, `<script>` tags trigger a `"script"`
-event, and their contents are not checked for special xml characters.
-If you pass `noscript: true`, then this behavior is suppressed.
-
-## Reporting Problems
-
-It's best to write a failing test if you find an issue. I will always
-accept pull requests with failing tests if they demonstrate intended
-behavior, but it is very hard to figure out what issue you're describing
-without a test. Writing a test is also the best way for you yourself
-to figure out if you really understand the issue you think you have with
-sax-js.
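The Methods and Events sections above describe the error-recovery contract: on `error` the parser stops, the error hangs off `parser.error`, and parsing continues only after `resume()` is called. A short sketch of that flow in strict mode, using made-up input:

```js
// Sketch of strict-mode error recovery as described above; input is made up.
var sax = require('sax');
var parser = sax.parser(true); // strict mode

parser.onerror = function (e) {
  console.error('parse error:', e.message);
  parser.resume();             // clears parser.error and continues parsing
};
parser.onopentag = function (node) {
  console.log('open', node.name, node.attributes);
};
parser.onend = function () {
  console.log('done');
};

// The mismatched close tag triggers an error in strict mode; parsing
// resumes from the handler and the "end" event still fires on close().
parser.write('<root><a></b></root>').close();
```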
diff --git a/scripts/metrics/node_modules/sax/lib/sax.js b/scripts/metrics/node_modules/sax/lib/sax.js
deleted file mode 100644
index 795d607ef63..00000000000
--- a/scripts/metrics/node_modules/sax/lib/sax.js
+++ /dev/null
@@ -1,1565 +0,0 @@
-;(function (sax) { // wrapper for non-node envs
- sax.parser = function (strict, opt) { return new SAXParser(strict, opt) }
- sax.SAXParser = SAXParser
- sax.SAXStream = SAXStream
- sax.createStream = createStream
-
- // When we pass the MAX_BUFFER_LENGTH position, start checking for buffer overruns.
- // When we check, schedule the next check for MAX_BUFFER_LENGTH - (max(buffer lengths)),
- // since that's the earliest that a buffer overrun could occur. This way, checks are
- // as rare as required, but as often as necessary to ensure never crossing this bound.
- // Furthermore, buffers are only tested at most once per write(), so passing a very
- // large string into write() might have undesirable effects, but this is manageable by
- // the caller, so it is assumed to be safe. Thus, a call to write() may, in the extreme
- // edge case, result in creating at most one complete copy of the string passed in.
- // Set to Infinity to have unlimited buffers.
- sax.MAX_BUFFER_LENGTH = 64 * 1024
-
- var buffers = [
- 'comment', 'sgmlDecl', 'textNode', 'tagName', 'doctype',
- 'procInstName', 'procInstBody', 'entity', 'attribName',
- 'attribValue', 'cdata', 'script'
- ]
-
- sax.EVENTS = [
- 'text',
- 'processinginstruction',
- 'sgmldeclaration',
- 'doctype',
- 'comment',
- 'opentagstart',
- 'attribute',
- 'opentag',
- 'closetag',
- 'opencdata',
- 'cdata',
- 'closecdata',
- 'error',
- 'end',
- 'ready',
- 'script',
- 'opennamespace',
- 'closenamespace'
- ]
-
- function SAXParser (strict, opt) {
- if (!(this instanceof SAXParser)) {
- return new SAXParser(strict, opt)
- }
-
- var parser = this
- clearBuffers(parser)
- parser.q = parser.c = ''
- parser.bufferCheckPosition = sax.MAX_BUFFER_LENGTH
- parser.opt = opt || {}
- parser.opt.lowercase = parser.opt.lowercase || parser.opt.lowercasetags
- parser.looseCase = parser.opt.lowercase ? 'toLowerCase' : 'toUpperCase'
- parser.tags = []
- parser.closed = parser.closedRoot = parser.sawRoot = false
- parser.tag = parser.error = null
- parser.strict = !!strict
- parser.noscript = !!(strict || parser.opt.noscript)
- parser.state = S.BEGIN
- parser.strictEntities = parser.opt.strictEntities
- parser.ENTITIES = parser.strictEntities ? Object.create(sax.XML_ENTITIES) : Object.create(sax.ENTITIES)
- parser.attribList = []
-
- // namespaces form a prototype chain.
- // it always points at the current tag,
- // which protos to its parent tag.
- if (parser.opt.xmlns) {
- parser.ns = Object.create(rootNS)
- }
-
- // mostly just for error reporting
- parser.trackPosition = parser.opt.position !== false
- if (parser.trackPosition) {
- parser.position = parser.line = parser.column = 0
- }
- emit(parser, 'onready')
- }
-
- if (!Object.create) {
- Object.create = function (o) {
- function F () {}
- F.prototype = o
- var newf = new F()
- return newf
- }
- }
-
- if (!Object.keys) {
- Object.keys = function (o) {
- var a = []
- for (var i in o) if (o.hasOwnProperty(i)) a.push(i)
- return a
- }
- }
-
- function checkBufferLength (parser) {
- var maxAllowed = Math.max(sax.MAX_BUFFER_LENGTH, 10)
- var maxActual = 0
- for (var i = 0, l = buffers.length; i < l; i++) {
- var len = parser[buffers[i]].length
- if (len > maxAllowed) {
- // Text/cdata nodes can get big, and since they're buffered,
- // we can get here under normal conditions.
- // Avoid issues by emitting the text node now,
- // so at least it won't get any bigger.
- switch (buffers[i]) {
- case 'textNode':
- closeText(parser)
- break
-
- case 'cdata':
- emitNode(parser, 'oncdata', parser.cdata)
- parser.cdata = ''
- break
-
- case 'script':
- emitNode(parser, 'onscript', parser.script)
- parser.script = ''
- break
-
- default:
- error(parser, 'Max buffer length exceeded: ' + buffers[i])
- }
- }
- maxActual = Math.max(maxActual, len)
- }
- // schedule the next check for the earliest possible buffer overrun.
- var m = sax.MAX_BUFFER_LENGTH - maxActual
- parser.bufferCheckPosition = m + parser.position
- }
-
- function clearBuffers (parser) {
- for (var i = 0, l = buffers.length; i < l; i++) {
- parser[buffers[i]] = ''
- }
- }
-
- function flushBuffers (parser) {
- closeText(parser)
- if (parser.cdata !== '') {
- emitNode(parser, 'oncdata', parser.cdata)
- parser.cdata = ''
- }
- if (parser.script !== '') {
- emitNode(parser, 'onscript', parser.script)
- parser.script = ''
- }
- }
-
- SAXParser.prototype = {
- end: function () { end(this) },
- write: write,
- resume: function () { this.error = null; return this },
- close: function () { return this.write(null) },
- flush: function () { flushBuffers(this) }
- }
-
- var Stream
- try {
- Stream = require('stream').Stream
- } catch (ex) {
- Stream = function () {}
- }
-
- var streamWraps = sax.EVENTS.filter(function (ev) {
- return ev !== 'error' && ev !== 'end'
- })
-
- function createStream (strict, opt) {
- return new SAXStream(strict, opt)
- }
-
- function SAXStream (strict, opt) {
- if (!(this instanceof SAXStream)) {
- return new SAXStream(strict, opt)
- }
-
- Stream.apply(this)
-
- this._parser = new SAXParser(strict, opt)
- this.writable = true
- this.readable = true
-
- var me = this
-
- this._parser.onend = function () {
- me.emit('end')
- }
-
- this._parser.onerror = function (er) {
- me.emit('error', er)
-
- // if didn't throw, then means error was handled.
- // go ahead and clear error, so we can write again.
- me._parser.error = null
- }
-
- this._decoder = null
-
- streamWraps.forEach(function (ev) {
- Object.defineProperty(me, 'on' + ev, {
- get: function () {
- return me._parser['on' + ev]
- },
- set: function (h) {
- if (!h) {
- me.removeAllListeners(ev)
- me._parser['on' + ev] = h
- return h
- }
- me.on(ev, h)
- },
- enumerable: true,
- configurable: false
- })
- })
- }
-
- SAXStream.prototype = Object.create(Stream.prototype, {
- constructor: {
- value: SAXStream
- }
- })
-
- SAXStream.prototype.write = function (data) {
- if (typeof Buffer === 'function' &&
- typeof Buffer.isBuffer === 'function' &&
- Buffer.isBuffer(data)) {
- if (!this._decoder) {
- var SD = require('string_decoder').StringDecoder
- this._decoder = new SD('utf8')
- }
- data = this._decoder.write(data)
- }
-
- this._parser.write(data.toString())
- this.emit('data', data)
- return true
- }
-
- SAXStream.prototype.end = function (chunk) {
- if (chunk && chunk.length) {
- this.write(chunk)
- }
- this._parser.end()
- return true
- }
-
- SAXStream.prototype.on = function (ev, handler) {
- var me = this
- if (!me._parser['on' + ev] && streamWraps.indexOf(ev) !== -1) {
- me._parser['on' + ev] = function () {
- var args = arguments.length === 1 ? [arguments[0]] : Array.apply(null, arguments)
- args.splice(0, 0, ev)
- me.emit.apply(me, args)
- }
- }
-
- return Stream.prototype.on.call(me, ev, handler)
- }
-
- // this really needs to be replaced with character classes.
- // XML allows all manner of ridiculous numbers and digits.
- var CDATA = '[CDATA['
- var DOCTYPE = 'DOCTYPE'
- var XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
- var XMLNS_NAMESPACE = 'http://www.w3.org/2000/xmlns/'
- var rootNS = { xml: XML_NAMESPACE, xmlns: XMLNS_NAMESPACE }
-
- // http://www.w3.org/TR/REC-xml/#NT-NameStartChar
- // This implementation works on strings, a single character at a time
- // as such, it cannot ever support astral-plane characters (10000-EFFFF)
- // without a significant breaking change to either this parser, or the
- // JavaScript language. Implementation of an emoji-capable xml parser
- // is left as an exercise for the reader.
- var nameStart = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
-
- var nameBody = /[:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
-
- var entityStart = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD]/
- var entityBody = /[#:_A-Za-z\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u02FF\u0370-\u037D\u037F-\u1FFF\u200C-\u200D\u2070-\u218F\u2C00-\u2FEF\u3001-\uD7FF\uF900-\uFDCF\uFDF0-\uFFFD\u00B7\u0300-\u036F\u203F-\u2040.\d-]/
-
- function isWhitespace (c) {
- return c === ' ' || c === '\n' || c === '\r' || c === '\t'
- }
-
- function isQuote (c) {
- return c === '"' || c === '\''
- }
-
- function isAttribEnd (c) {
- return c === '>' || isWhitespace(c)
- }
-
- function isMatch (regex, c) {
- return regex.test(c)
- }
-
- function notMatch (regex, c) {
- return !isMatch(regex, c)
- }
-
- var S = 0
- sax.STATE = {
- BEGIN: S++, // leading byte order mark or whitespace
- BEGIN_WHITESPACE: S++, // leading whitespace
- TEXT: S++, // general stuff
- TEXT_ENTITY: S++, // & and such.
- OPEN_WAKA: S++, // <
- SGML_DECL: S++, //
- SCRIPT: S++, //