From b4caad9acd7f61e530895ce0220544cfee93e193 Mon Sep 17 00:00:00 2001 From: peter-evans Date: Mon, 21 Sep 2020 23:56:30 +0000 Subject: [PATCH] Update distribution --- dist/index.js | 15773 ++++++++++++++++++++++++------------------------ 1 file changed, 7890 insertions(+), 7883 deletions(-) diff --git a/dist/index.js b/dist/index.js index b0797908..bea280cf 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1,112 +1,9 @@ module.exports = -/******/ (function(modules, runtime) { // webpackBootstrap -/******/ "use strict"; -/******/ // The module cache -/******/ var installedModules = {}; -/******/ -/******/ // The require function -/******/ function __webpack_require__(moduleId) { -/******/ -/******/ // Check if module is in cache -/******/ if(installedModules[moduleId]) { -/******/ return installedModules[moduleId].exports; -/******/ } -/******/ // Create a new module (and put it into the cache) -/******/ var module = installedModules[moduleId] = { -/******/ i: moduleId, -/******/ l: false, -/******/ exports: {} -/******/ }; -/******/ -/******/ // Execute the module function -/******/ var threw = true; -/******/ try { -/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); -/******/ threw = false; -/******/ } finally { -/******/ if(threw) delete installedModules[moduleId]; -/******/ } -/******/ -/******/ // Flag the module as loaded -/******/ module.l = true; -/******/ -/******/ // Return the exports of the module -/******/ return module.exports; -/******/ } -/******/ -/******/ -/******/ __webpack_require__.ab = __dirname + "/"; -/******/ -/******/ // the startup function -/******/ function startup() { -/******/ // Load entry module and return exports -/******/ return __webpack_require__(109); -/******/ }; -/******/ -/******/ // run startup -/******/ return startup(); -/******/ }) -/************************************************************************/ -/******/ ({ - -/***/ 1: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = isexe -isexe.sync = sync - -var fs = __webpack_require__(747) - -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? - options.pathExt : process.env.PATHEXT - - if (!pathext) { - return true - } - - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true - } - } - return false -} - -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) -} - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, path, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} - - -/***/ }), +/******/ (() => { // webpackBootstrap +/******/ var __webpack_modules__ = ({ -/***/ 16: -/***/ (function(module) { - -module.exports = require("tls"); - -/***/ }), - -/***/ 30: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 3109: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; @@ -125,1662 +22,154 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getOctokitOptions = exports.GitHub = exports.context = void 0; -const Context = __importStar(__webpack_require__(53)); -const Utils = __importStar(__webpack_require__(914)); -// octokit + plugins -const core_1 = __webpack_require__(762); -const plugin_rest_endpoint_methods_1 = __webpack_require__(44); -const plugin_paginate_rest_1 = __webpack_require__(193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -const defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl) - } +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. 
- * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__webpack_require__(2186)); +const github = __importStar(__webpack_require__(5438)); +const util_1 = __webpack_require__(1669); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const inputs = { + token: core.getInput('token'), + repository: core.getInput('repository'), + eventType: core.getInput('event-type'), + clientPayload: core.getInput('client-payload') + }; + core.debug(`Inputs: ${util_1.inspect(inputs)}`); + const [owner, repo] = inputs.repository.split('/'); + const octokit = github.getOctokit(inputs.token); + yield octokit.repos.createDispatchEvent({ + owner: owner, + repo: repo, + event_type: inputs.eventType, + client_payload: JSON.parse(inputs.clientPayload) + }); + } + catch (error) { + core.debug(util_1.inspect(error)); + core.setFailed(error.message); + } + }); } -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map +run(); + /***/ }), -/***/ 32: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 7351: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; -var shebangRegex = __webpack_require__(638); - -module.exports = function (str) { - var match = str.match(shebangRegex); - - if (!match) { - return null; - } - - var arr = match[0].replace(/#! ?/, '').split(' '); - var bin = arr[0].split('/').pop(); - var arg = arr[1]; - - return (bin === 'env' ? - arg : - bin + (arg ? 
' ' + arg : '') - ); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; }; - +Object.defineProperty(exports, "__esModule", ({ value: true })); +const os = __importStar(__webpack_require__(2087)); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +function escapeData(s) { + return toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map /***/ }), -/***/ 44: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - 
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - 
listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } - }], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { - mediaType: { - previews: ["machine-man"] - } - }], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app", { - mediaType: { - previews: ["machine-man"] - } - }], - getBySlug: ["GET /apps/{app_slug}", { - mediaType: { - previews: ["machine-man"] - } - }], - getInstallation: ["GET /app/installations/{installation_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - getOrgInstallation: ["GET /orgs/{org}/installation", { - mediaType: { - previews: ["machine-man"] - } - }], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { - mediaType: { - previews: ["machine-man"] - } - }], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation", { - mediaType: { - previews: ["machine-man"] - } - }], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { - mediaType: { - previews: ["machine-man"] - } - }], - 
listInstallations: ["GET /app/installations", { - mediaType: { - previews: ["machine-man"] - } - }], - listInstallationsForAuthenticatedUser: ["GET /user/installations", { - mediaType: { - previews: ["machine-man"] - } - }], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories", { - mediaType: { - previews: ["machine-man"] - } - }], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], - getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], - getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - createSuite: ["POST /repos/{owner}/{repo}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { - mediaType: { - previews: ["antiope"] - } - }], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { - mediaType: { - previews: ["antiope"] - } - }], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { - mediaType: { - previews: ["antiope"] - } - }], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { - mediaType: { - previews: ["antiope"] - } - }], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { - mediaType: { - previews: ["antiope"] - } - }], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { - mediaType: { - previews: ["antiope"] - } - }], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] - } - }] - }, - codeScanning: { - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] - } - }], - getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] - } - }] - }, - emojis: { - get: ["GET /emojis"] - 
}, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] - } - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET 
/repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] - } - }], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" - } - }] - }, - meta: { - get: ["GET /meta"] - }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] - } - }], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - 
listReposForUser: ["GET /user/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] - } - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] - } - }], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations", { - mediaType: { - previews: ["machine-man"] - } - }], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - createColumn: ["POST /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - createForAuthenticatedUser: ["POST /user/projects", { - mediaType: { - previews: ["inertia"] - } - }], - createForOrg: ["POST /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - 
createForRepo: ["POST /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - delete: ["DELETE /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - get: ["GET /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getCard: ["GET /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getColumn: ["GET /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] - } - }], - listCards: ["GET /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { - mediaType: { - previews: ["inertia"] - } - }], - listColumns: ["GET /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - listForOrg: ["GET /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listForUser: ["GET /users/{username}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { - mediaType: { - previews: ["inertia"] - } - }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - update: ["PATCH /projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateCard: ["PATCH /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] - } - }], - updateColumn: ["PATCH /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] - } - }] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - 
listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { - mediaType: { - previews: ["lydian"] - } - }], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - deleteLegacy: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] - } - }, { - deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" - }], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForIssueComment: ["GET 
/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }], - listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] - } - }] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { - mediaType: { - previews: ["baptiste"] - } - }], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - deleteDeployKey: ["DELETE 
/repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] - } - }], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] - } - }], - enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] - } - }], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], - getAllTopics: ["GET /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], - getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] - } - }], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", { - mediaType: { - previews: ["black-panther"] - } - }], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getParticipationStats: 
["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { - mediaType: { - previews: ["groot"] - } - }], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { - mediaType: { - previews: ["groot"] - } - }], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] - } - }], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { - mediaType: { - previews: ["cloak"] - } - }], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics", { - mediaType: { - previews: ["mercy"] - } - }], - users: ["GET /search/users"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] - } - }], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { - mediaType: { - previews: ["inertia"] - } - }], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: ["POST /user/emails"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: ["POST /user/keys"], - deleteEmailForAuthenticated: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; - -const VERSION = "4.1.0"; - 
-function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; - - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); - - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - - const scopeMethods = newMethods[scope]; - - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } - - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); - } - } - - return newMethods; -} - -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - /* istanbul ignore next */ - - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } - - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } - - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); - - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - - if (!(alias in options)) { - options[alias] = options[name]; - } - - delete options[name]; - } - } - - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - - - return requestWithDefaults(...args); - } - - return Object.assign(withDecorations, requestWithDefaults); -} - -/** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. 
- * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 - */ - -function restEndpointMethods(octokit) { - return endpointsToMethods(octokit, Endpoints); -} -restEndpointMethods.VERSION = VERSION; - -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 48: -/***/ (function(module) { - -"use strict"; - - -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; - -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - return arg; -} - -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; - - // Algorithm below is based on https://qntm.org/cmd - - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(\\*)"/g, '$1$1\\"'); - - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(\\*)$/, '$1$1'); - - // All other backslashes occur literally - - // Quote the whole thing: - arg = `"${arg}"`; - - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } - - return arg; -} - -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; - - -/***/ }), - -/***/ 53: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -exports.Context = void 0; -const fs_1 = __webpack_require__(747); -const os_1 = __webpack_require__(87); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 87: -/***/ (function(module) { - -module.exports = require("os"); - -/***/ }), - -/***/ 101: -/***/ (function(module) { - -"use strict"; - - -const isWin = process.platform === 'win32'; - -function 
notFoundError(original, syscall) { - return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { - code: 'ENOENT', - errno: 'ENOENT', - syscall: `${syscall} ${original.command}`, - path: original.command, - spawnargs: original.args, - }); -} - -function hookChildProcess(cp, parsed) { - if (!isWin) { - return; - } - - const originalEmit = cp.emit; - - cp.emit = function (name, arg1) { - // If emitting "exit" event and exit code is 1, we need to check if - // the command exists and emit an "error" instead - // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 - if (name === 'exit') { - const err = verifyENOENT(arg1, parsed, 'spawn'); - - if (err) { - return originalEmit.call(cp, 'error', err); - } - } - - return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params - }; -} - -function verifyENOENT(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawn'); - } - - return null; -} - -function verifyENOENTSync(status, parsed) { - if (isWin && status === 1 && !parsed.file) { - return notFoundError(parsed.original, 'spawnSync'); - } - - return null; -} - -module.exports = { - hookChildProcess, - verifyENOENT, - verifyENOENTSync, - notFoundError, -}; - - -/***/ }), - -/***/ 109: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __importStar(__webpack_require__(186)); -const github = __importStar(__webpack_require__(438)); -const util_1 = __webpack_require__(669); -function run() { - return __awaiter(this, void 0, void 0, function* () { - try { - const inputs = { - token: core.getInput('token'), - repository: core.getInput('repository'), - eventType: core.getInput('event-type'), - clientPayload: core.getInput('client-payload') - }; - core.debug(`Inputs: ${util_1.inspect(inputs)}`); - const [owner, repo] = inputs.repository.split('/'); - const octokit = github.getOctokit(inputs.token); - yield octokit.repos.createDispatchEvent({ - owner: owner, - repo: repo, - event_type: inputs.eventType, - client_payload: JSON.parse(inputs.clientPayload) - }); - } - catch (error) { - core.debug(util_1.inspect(error)); - core.setFailed(error.message); - } - }); -} -run(); - - -/***/ }), - -/***/ 126: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var fs = __webpack_require__(747) -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = __webpack_require__(1) -} else { - core = __webpack_require__(728) -} - -module.exports = isexe -isexe.sync = sync - -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } - - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } - - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) - } - - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false - } - } - cb(er, is) - }) -} - -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } - } -} - - -/***/ }), - -/***/ 129: -/***/ (function(module) { - -module.exports = require("child_process"); - -/***/ }), - -/***/ 166: -/***/ (function(module) { - -"use strict"; - -const alias = ['stdin', 'stdout', 'stderr']; - -const hasAlias = opts => alias.some(x => Boolean(opts[x])); - -module.exports = opts => { - if (!opts) { - return null; - } - - if (opts.stdio && hasAlias(opts)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); - } - - if (typeof opts.stdio === 'string') { - return opts.stdio; - } - - const stdio = opts.stdio || []; - - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); - } - - const result = []; - const len = Math.max(stdio.length, alias.length); - - for (let i = 0; i < len; i++) { - let value = null; - - if (stdio[i] !== undefined) { - value = stdio[i]; - } else if (opts[alias[i]] !== undefined) { - value = opts[alias[i]]; - } - - result[i] = value; - } - - return result; -}; - - -/***/ }), - -/***/ 186: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 2186: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { 
"use strict"; @@ -1800,10 +189,10 @@ var __importStar = (this && this.__importStar) || function (mod) { result["default"] = mod; return result; }; -Object.defineProperty(exports, "__esModule", { value: true }); -const command_1 = __webpack_require__(351); -const os = __importStar(__webpack_require__(87)); -const path = __importStar(__webpack_require__(622)); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const command_1 = __webpack_require__(7351); +const os = __importStar(__webpack_require__(2087)); +const path = __importStar(__webpack_require__(5622)); /** * The code to exit an action */ @@ -1976,4159 +365,4721 @@ function group(name, fn) { finally { endGroup(); } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. 
+ * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 4087: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Context = void 0; +const fs_1 = __webpack_require__(5747); +const os_1 = __webpack_require__(2087); +class Context { + /** + * Hydrate the context from the environment + */ + constructor() { + this.payload = {}; + if (process.env.GITHUB_EVENT_PATH) { + if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) { + this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); + } + else { + const path = process.env.GITHUB_EVENT_PATH; + process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); + } + } + this.eventName = process.env.GITHUB_EVENT_NAME; + this.sha = process.env.GITHUB_SHA; + this.ref = process.env.GITHUB_REF; + this.workflow = process.env.GITHUB_WORKFLOW; + this.action = process.env.GITHUB_ACTION; + this.actor = process.env.GITHUB_ACTOR; + this.job = process.env.GITHUB_JOB; + this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); + this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); + } + get issue() { + const payload = this.payload; + return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); + } + get repo() { + if (process.env.GITHUB_REPOSITORY) { + const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); + return { owner, repo }; + } + if (this.payload.repository) { + return { + owner: this.payload.repository.owner.login, + repo: this.payload.repository.name + }; + } + throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); + } } -exports.saveState = saveState; +exports.Context = Context; +//# sourceMappingURL=context.js.map + +/***/ }), + +/***/ 5438: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokit = exports.context = void 0; +const Context = __importStar(__webpack_require__(4087)); +const utils_1 = __webpack_require__(3030); +exports.context = new Context.Context(); /** - * Gets the value of an state set by this action's main execution. 
+ * Returns a hydrated octokit ready to use for GitHub Actions * - * @param name name of the state to get - * @returns string + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; +function getOctokit(token, options) { + return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); } -exports.getState = getState; -//# sourceMappingURL=core.js.map +exports.getOctokit = getOctokit; +//# sourceMappingURL=github.js.map /***/ }), -/***/ 193: -/***/ (function(__unusedmodule, exports) { +/***/ 7914: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { "use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; +const httpClient = __importStar(__webpack_require__(9925)); +function getAuthString(token, options) { + if (!token && !options.auth) { + throw new Error('Parameter token or opts.auth is required'); + } + else if (token && options.auth) { + throw new Error('Parameters token and opts.auth may not both be specified'); + } + return typeof options.auth === 'string' ? options.auth : `token ${token}`; +} +exports.getAuthString = getAuthString; +function getProxyAgent(destinationUrl) { + const hc = new httpClient.HttpClient(); + return hc.getAgent(destinationUrl); +} +exports.getProxyAgent = getProxyAgent; +function getApiBaseUrl() { + return process.env['GITHUB_API_URL'] || 'https://api.github.com'; +} +exports.getApiBaseUrl = getApiBaseUrl; +//# sourceMappingURL=utils.js.map + +/***/ }), -Object.defineProperty(exports, '__esModule', { value: true }); +/***/ 3030: +/***/ (function(__unused_webpack_module, exports, __webpack_require__) { -const VERSION = "2.2.3"; +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getOctokitOptions = exports.GitHub = exports.context = void 0; +const Context = __importStar(__webpack_require__(4087)); +const Utils = __importStar(__webpack_require__(7914)); +// octokit + plugins +const core_1 = __webpack_require__(6762); +const plugin_rest_endpoint_methods_1 = __webpack_require__(3044); +const plugin_paginate_rest_1 = __webpack_require__(4193); +exports.context = new Context.Context(); +const baseUrl = Utils.getApiBaseUrl(); +const defaults = { + baseUrl, + request: { + agent: Utils.getProxyAgent(baseUrl) + } +}; +exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(defaults); /** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. + * Convience function to correctly format Octokit Options to pass into the constructor. * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + * @param token the repo PAT or GITHUB_TOKEN + * @param options other options to set */ -function normalizePaginatedListResponse(response) { - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. 
- - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } +function getOctokitOptions(token, options) { + const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller + // Auth + const auth = Utils.getAuthString(token, opts); + if (auth) { + opts.auth = auth; + } + return opts; +} +exports.getOctokitOptions = getOctokitOptions; +//# sourceMappingURL=utils.js.map - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } +/***/ }), - response.data.total_count = totalCount; - return response; -} +/***/ 9925: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - next() { - if (!url) { - return Promise.resolve({ - done: true - }); - } +"use strict"; - return requestMethod({ - method, - url, - headers - }).then(normalizePaginatedListResponse).then(response => { - // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: response - }; +Object.defineProperty(exports, "__esModule", ({ value: true })); +const url = __webpack_require__(8835); +const http = __webpack_require__(8605); +const https = __webpack_require__(7211); +const pm = __webpack_require__(6443); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + 
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); }); - } - - }) - }; + } } - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = url.parse(requestUrl); + return parsedUrl.protocol === 'https:'; } - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) 
{ + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } } - - let earlyExit = false; - - function done() { - earlyExit = true; + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. 
+ * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + let parsedUrl = url.parse(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = url.parse(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); } - - results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } } - - return gather(octokit, results, iterator, mapFn); - }); -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map - - -/***/ }), - -/***/ 205: -/***/ (function(module, __unusedexports, __webpack_require__) { - -var once = __webpack_require__(223); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; - - -/***/ }), - -/***/ 207: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = which -which.sync = whichSync - -var isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' - -var path = __webpack_require__(622) -var COLON = isWindows ? ';' : ':' -var isexe = __webpack_require__(126) - -function getNotFoundError (cmd) { - var er = new Error('not found: ' + cmd) - er.code = 'ENOENT' - - return er -} - -function getPathInfo (cmd, opt) { - var colon = opt.colon || COLON - var pathEnv = opt.path || process.env.PATH || '' - var pathExt = [''] - - pathEnv = pathEnv.split(colon) - - var pathExtExe = '' - if (isWindows) { - pathEnv.unshift(process.cwd()) - pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') - pathExt = pathExtExe.split(colon) - - - // Always test the cmd itself first. isexe will check to make sure - // it's found in the pathExt set. - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } - - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) - pathEnv = [''] - - return { - env: pathEnv, - ext: pathExt, - extExe: pathExtExe - } -} - -function which (cmd, opt, cb) { - if (typeof opt === 'function') { - cb = opt - opt = {} - } - - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] - - ;(function F (i, l) { - if (i === l) { - if (opt.all && found.length) - return cb(null, found) - else - return cb(getNotFoundError(cmd)) + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. 
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = url.parse(serverUrl); + return this._getAgent(parsedUrl); } - - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - - var p = path.join(pathPart, cmd) - if (!pathPart && (/^\.[\\\/]/).test(cmd)) { - p = cmd.slice(0, 2) + p + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; } - ;(function E (ii, ll) { - if (ii === ll) return F(i + 1, l) - var ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return cb(null, p + ext) + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); } - return E(ii + 1, ll) - }) - })(0, pathExt.length) - })(0, pathEnv.length) -} - -function whichSync (cmd, opt) { - opt = opt || {} - - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] - - for (var i = 0, l = pathEnv.length; i < l; i ++) { - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - - var p = path.join(pathPart, cmd) - if (!pathPart && /^\.[\\\/]/.test(cmd)) { - p = cmd.slice(0, 2) + p + return lowercaseKeys(headers || {}); } - for (var j = 0, ll = pathExt.length; j < ll; j ++) { - var cur = p + pathExt[j] - var is - try { - is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; } - } catch (ex) {} + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(4294); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: proxyUrl.auth, + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. 
+ if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new Error(msg); + // attach statusCode and body obj (if available) to the error object + err['statusCode'] = statusCode; + if (response.result) { + err['result'] = response.result; + } + reject(err); + } + else { + resolve(response); + } + }); } - } - - if (opt.all && found.length) - return found - - if (opt.nothrow) - return null - - throw getNotFoundError(cmd) -} - - -/***/ }), - -/***/ 211: -/***/ (function(module) { - -module.exports = require("https"); - -/***/ }), - -/***/ 219: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; } +exports.HttpClient = HttpClient; -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} +/***/ }), +/***/ 6443: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; +"use strict"; - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. 
- self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } +Object.defineProperty(exports, "__esModule", ({ value: true })); +const url = __webpack_require__(8835); +function getProxyUrl(reqUrl) { + let usingSsl = reqUrl.protocol === 'https:'; + let proxyUrl; + if (checkBypass(reqUrl)) { + return proxyUrl; } - socket.destroy(); - self.removeSocket(socket); - }); + let proxyVar; + if (usingSsl) { + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + } + else { + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + } + if (proxyVar) { + proxyUrl = url.parse(proxyVar); + } + return proxyUrl; } -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. - self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); +exports.getProxyUrl = getProxyUrl; +function checkBypass(reqUrl) { + if (!reqUrl.hostname) { + return false; } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + if (!noProxy) { + return false; } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port + // Determine the request port + let reqPort; + if (reqUrl.port) { + reqPort = Number(reqUrl.port); } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. 
- process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + else if (reqUrl.protocol === 'http:') { + reqPort = 80; } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; + else if (reqUrl.protocol === 'https:') { + reqPort = 443; } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. - this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; + // Format the request hostname and hostname with port + let upperReqHosts = [reqUrl.hostname.toUpperCase()]; + if (typeof reqPort === 'number') { + upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); + } + // Compare request host against noproxy + for (let upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { + if (upperReqHosts.some(x => x === upperNoProxyItem)) { + return true; } - } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); } - console.error.apply(console, args); - } -} else { - debug = function() {}; + return false; } -exports.debug = debug; // for test +exports.checkBypass = checkBypass; /***/ }), -/***/ 223: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 334: +/***/ ((__unused_webpack_module, exports) => { -var wrappy = __webpack_require__(940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) +"use strict"; -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) +Object.defineProperty(exports, "__esModule", ({ value: true })); -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; } -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} + return `token ${token}`; +} -/***/ }), - -/***/ 234: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } -Object.defineProperty(exports, '__esModule', { value: true }); + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; -var endpoint = __webpack_require__(440); -var universalUserAgent = __webpack_require__(429); -var isPlainObject = _interopDefault(__webpack_require__(840)); -var nodeFetch = _interopDefault(__webpack_require__(467)); -var requestError = __webpack_require__(537); +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map -const VERSION = "5.4.5"; -function getBufferResponse(response) { - return response.arrayBuffer(); -} +/***/ }), -function fetchWrapper(requestOptions) { - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } +/***/ 6762: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; +"use strict"; - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests +Object.defineProperty(exports, "__esModule", ({ value: true })); +var universalUserAgent = __webpack_require__(5030); +var beforeAfterHook = __webpack_require__(3682); +var request = __webpack_require__(6234); +var graphql = __webpack_require__(8467); +var authToken = __webpack_require__(334); - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } - throw new 
requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); - } + return obj; +} - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, - request: requestOptions - }); - } +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { - headers, - request: requestOptions - }); + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + if (enumerableOnly) symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + keys.push.apply(keys, symbols); + } - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format + return keys; +} - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; - throw error; + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } + } - const contentType = response.headers.get("content-type"); - - if (/application\/json/.test(contentType)) { - return response.json(); - } - - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } - - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); + return target; } -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); +const VERSION = "3.1.0"; - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; // prepend default user agent with `options.userAgent` if set - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; } - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return 
endpointOptions.request.hook(request, endpointOptions); - }; + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { + baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") + })); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ -exports.request = request; -//# sourceMappingURL=index.js.map + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const auth = options.authStrategy(Object.assign({ + request: this.request + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 -/***/ }), -/***/ 252: -/***/ (function(module, __unusedexports, __webpack_require__) { + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } -"use strict"; + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } -const fs = __webpack_require__(747); -const shebangCommand = __webpack_require__(32); + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); + } -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - let buffer; + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
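   *
   * // For illustration only (a hedged sketch; `withWhoAmI` and `whoAmI` are hypothetical names):
   * // the constructor above merges each plugin's return value onto the instance, so a plugin is
   * // simply a function of (octokit, options) returning the members to attach.
   * const withWhoAmI = (octokit, options) => ({
   *   whoAmI: () => octokit.request("GET /user")
   * });
   * const MyOctokit = Octokit.plugin(withWhoAmI);
   * const octokit = new MyOctokit({ auth: "<access token>" });
   * octokit.whoAmI().then(({ data }) => console.log(data.login));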
+ */ - if (Buffer.alloc) { - // Node.js v4.5+ / v5.10+ - buffer = Buffer.alloc(size); - } else { - // Old Node.js API - buffer = new Buffer(size); - buffer.fill(0); // zero-fill - } - let fd; + static plugin(...newPlugins) { + var _a; - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } + const currentPlugins = this.plugins; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); } +Octokit.VERSION = VERSION; +Octokit.plugins = []; -module.exports = readShebang; +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map /***/ }), -/***/ 274: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 9440: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -const path = __webpack_require__(622); -const which = __webpack_require__(207); -const pathKey = __webpack_require__(539)(); - -function resolveCommandAttempt(parsed, withoutPathExt) { - const cwd = process.cwd(); - const hasCustomCwd = parsed.options.cwd != null; - - // If a custom `cwd` was specified, we need to change the process cwd - // because `which` will do stat calls but does not support a custom cwd - if (hasCustomCwd) { - try { - process.chdir(parsed.options.cwd); - } catch (err) { - /* Empty */ - } - } +Object.defineProperty(exports, "__esModule", ({ value: true })); - let resolved; +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - try { - resolved = which.sync(parsed.command, { - path: (parsed.options.env || process.env)[pathKey], - pathExt: withoutPathExt ? path.delimiter : undefined, - }); - } catch (e) { - /* Empty */ - } finally { - process.chdir(cwd); - } +var isPlainObject = _interopDefault(__webpack_require__(8840)); +var universalUserAgent = __webpack_require__(5030); - // If we successfully resolved, ensure that an absolute path is returned - // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it - if (resolved) { - resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); - } +function lowercaseKeys(object) { + if (!object) { + return {}; + } - return resolved; + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); } -function resolveCommand(parsed) { - return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; } -module.exports = resolveCommand; +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? 
{ + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates -/***/ }), + options.headers = lowercaseKeys(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten -/***/ 294: -/***/ (function(module, __unusedexports, __webpack_require__) { + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } -module.exports = __webpack_require__(219); + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); -/***/ }), + if (names.length === 0) { + return url; + } -/***/ 330: -/***/ (function(module) { + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } -"use strict"; + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} -module.exports = (promise, onFinally) => { - onFinally = onFinally || (() => {}); +const urlVariableRegex = /\{[^}]+\}/g; - return promise.then( - val => new Promise(resolve => { - resolve(onFinally()); - }).then(() => val), - err => new Promise(resolve => { - resolve(onFinally()); - }).then(() => { - throw err; - }) - ); -}; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); -/***/ }), + if (!matches) { + return []; + } -/***/ 334: -/***/ (function(__unusedmodule, exports) { + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} -"use strict"; +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -Object.defineProperty(exports, '__esModule', { value: true }); +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; + return part; + }).join(""); } -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - - return `token ${token}`; +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); } -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; } +} - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); - } +function isDefined(value) { + return value !== undefined && value !== null; +} - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); -/***/ }), + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } -/***/ 341: -/***/ (function(module, __unusedexports, __webpack_require__) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; -var once = __webpack_require__(223) -var eos = __webpack_require__(205) -var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } -var noop = function () {} -var ancient = /^v?\.0/.test(process.version) + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } -var isFn = function (fn) { - return typeof fn === 'function' + return result; } -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; } -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) -} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); + } - var closed = false - stream.on('close', function () { - closed = true - }) + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) + if (operator && operator !== "+") { + var separator = ","; - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + return (values.length !== 0 ? 
operator : "") + values.join(separator); + } else { + return values.join(","); + } + } else { + return encodeReserved(literal); + } + }); +} - if (isFn(stream.destroy)) return stream.destroy() +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - callback(err || new Error('stream was destroyed')) - } -} + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later -var call = function (fn) { - fn() -} + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); -var pipe = function (from, to) { - return from.pipe(to) -} + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') + if (!isBinaryRequset) { + if (options.mediaType.format) { + // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) - if (reading) return - destroys.forEach(call) - callback(error) - }) - }) + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - return streams.reduce(pipe) -} -module.exports = pump + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; + } + } + } // default content-type for JSON if body is set + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. 
+ // fetch does not allow to set `content-length` header, but we can set body to an empty string -/***/ }), -/***/ 351: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present -"use strict"; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(__webpack_require__(87)); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? { + request: options.request + } : null); } -exports.toCommandValue = toCommandValue; -function escapeData(s) { - return toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); } -function escapeProperty(s) { - return toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); } -//# sourceMappingURL=command.js.map -/***/ }), +const VERSION = "6.0.3"; -/***/ 357: -/***/ (function(module) { +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. 
+// So we use RequestParameters and add method as additional required property. -module.exports = require("assert"); +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "", + previews: [] + } +}; -/***/ }), +const endpoint = withDefaults(null, DEFAULTS); -/***/ 413: -/***/ (function(module) { +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map -module.exports = require("stream"); /***/ }), -/***/ 429: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 8467: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -Object.defineProperty(exports, '__esModule', { value: true }); +Object.defineProperty(exports, "__esModule", ({ value: true })); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +var request = __webpack_require__(6234); +var universalUserAgent = __webpack_require__(5030); -var osName = _interopDefault(__webpack_require__(824)); +const VERSION = "4.5.1"; -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; - } +class GraphqlError extends Error { + constructor(request, response) { + const message = response.data.errors[0].message; + super(message); + Object.assign(this, response.data); + this.name = "GraphqlError"; + this.request = request; // Maintains proper stack trace (only available on V8) - return ""; + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } } + } -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map +const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; +function graphql(request, query, options) { + options = typeof query === "string" ? options = Object.assign({ + query + }, options) : options = query; + const requestOptions = Object.keys(options).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = options[key]; + return result; + } + if (!result.variables) { + result.variables = {}; + } -/***/ }), + result.variables[key] = options[key]; + return result; + }, {}); + return request(requestOptions).then(response => { + if (response.data.errors) { + throw new GraphqlError(requestOptions, { + data: response.data + }); + } -/***/ 438: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + return response.data.data; + }); +} -"use strict"; +function withDefaults(request$1, newDefaults) { + const newRequest = request$1.defaults(newDefaults); -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + + return Object.assign(newApi, { + defaults: withDefaults.bind(null, newRequest), + endpoint: request.request.endpoint + }); +} + +const graphql$1 = withDefaults(request.request, { + headers: { + "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` + }, + method: "POST", + url: "/graphql" }); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__webpack_require__(53)); -const utils_1 = __webpack_require__(30); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options) { - return new utils_1.GitHub(utils_1.getOctokitOptions(token, options)); +function withCustomRequest(customRequest) { + return withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); } -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map + +exports.graphql = graphql$1; +exports.withCustomRequest = withCustomRequest; +//# sourceMappingURL=index.js.map + /***/ }), -/***/ 440: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 4193: +/***/ ((__unused_webpack_module, exports) => { "use strict"; -Object.defineProperty(exports, '__esModule', { value: true }); +Object.defineProperty(exports, "__esModule", ({ value: true })); -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +const VERSION = "2.2.3"; -var isPlainObject = _interopDefault(__webpack_require__(840)); -var universalUserAgent = __webpack_require__(429); +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. 
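  // A hedged illustration of the reshaping below: a search-style payload such as
  //   { total_count: 2, incomplete_results: false, items: [a, b] }
  // becomes `response.data = [a, b]`, with `total_count` and `incomplete_results`
  // re-attached to that array, so paginated list responses can be consumed uniformly.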
-function lowercaseKeys(object) { - if (!object) { - return {}; + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; } - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; + response.data.total_count = totalCount; + return response; } -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + next() { + if (!url) { + return Promise.resolve({ + done: true + }); + } + return requestMethod({ + method, + url, + headers + }).then(normalizePaginatedListResponse).then(response => { + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: response + }; + }); + } - options.headers = lowercaseKeys(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + }) + }; +} - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; } - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); } -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } - if (names.length === 0) { - return url; - } + let earlyExit = false; - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + function done() { + earlyExit = true; } - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} + results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); -const urlVariableRegex = /\{[^}]+\}/g; + if (earlyExit) { + return results; + } -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); + return gather(octokit, results, iterator, mapFn); + }); } -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; } +paginateRest.VERSION = VERSION; -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} +exports.paginateRest = paginateRest; +//# sourceMappingURL=index.js.map -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
-/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } +/***/ }), - return part; - }).join(""); -} +/***/ 3044: +/***/ ((__unused_webpack_module, exports) => { -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} +"use strict"; -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; +Object.defineProperty(exports, "__esModule", ({ value: true })); + +const Endpoints = { + actions: { + addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], + createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], + createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + 
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markNotificationsAsRead: ["PUT /notifications"], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + checkToken: 
["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { + mediaType: { + previews: ["machine-man"] + } + }], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app", { + mediaType: { + previews: ["machine-man"] + } + }], + getBySlug: ["GET /apps/{app_slug}", { + mediaType: { + previews: ["machine-man"] + } + }], + getInstallation: ["GET /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + getOrgInstallation: ["GET /orgs/{org}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallations: ["GET /app/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallationsForAuthenticatedUser: ["GET /user/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listReposAccessibleToInstallation: ["GET /installation/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] + }, + billing: { + getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], + getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], + getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], + getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], + getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], + getSharedStorageBillingUser: ["GET /users/{username}/settings/billing/shared-storage"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + createSuite: ["POST 
/repos/{owner}/{repo}/check-suites", { + mediaType: { + previews: ["antiope"] + } + }], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { + mediaType: { + previews: ["antiope"] + } + }], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { + mediaType: { + previews: ["antiope"] + } + }], + listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { + mediaType: { + previews: ["antiope"] + } + }], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { + mediaType: { + previews: ["antiope"] + } + }], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { + mediaType: { + previews: ["antiope"] + } + }], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }] + }, + codeScanning: { + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }] + }, + emojis: { + get: ["GET /emojis"] + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + 
getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"] + }, + interactions: { + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] + } + }], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH 
/repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForAuthenticatedUser: ["GET /user/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForOrg: ["GET /orgs/{org}/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForUser: ["GET /user/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET 
/users/{username}/orgs"], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + createForAuthenticatedUser: ["POST /user/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForOrg: ["POST /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForRepo: ["POST /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + delete: ["DELETE /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteColumn: ["DELETE /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + get: ["GET /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getCard: ["GET /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getColumn: ["GET /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }], + listCards: ["GET /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + listCollaborators: ["GET /projects/{project_id}/collaborators", { + mediaType: { + previews: ["inertia"] + } + }], + listColumns: ["GET /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + listForOrg: ["GET /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + removeCollaborator: ["DELETE 
/projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + mediaType: { + previews: ["lydian"] + } + }], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + }, + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + 
createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteLegacy: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }, { + deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] + }, + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + 
createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + 
mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", { + mediaType: { + previews: ["black-panther"] + } + }], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: 
["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + merge: ["POST /repos/{owner}/{repo}/merges"], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + 
updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics", { + mediaType: { + previews: ["mercy"] + } + }], + users: ["GET /search/users"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + block: ["PUT 
/user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmailsForAuthenticated: ["GET /user/public_emails"], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} +}; -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} +const VERSION = "4.1.0"; -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); + if (!newMethods[scope]) { + newMethods[scope] = {}; } - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } + const scopeMethods = newMethods[scope]; - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); + + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); } } - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; + return newMethods; } -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); + /* istanbul ignore next */ - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined }); + return requestWithDefaults(options); + } - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); } - }); -} -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{+$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); + if (!(alias in options)) { + options[alias] = options[name]; + } - if (!/^http/.test(url)) { - url = options.baseUrl + url; + delete options[name]; + } + } + + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + + + return requestWithDefaults(...args); } - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequset = /application\/octet-stream/i.test(headers.accept); + return Object.assign(withDecorations, requestWithDefaults); +} - if (!isBinaryRequset) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } +/** + * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary + * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is + * done, we will remove the registerEndpoints methods and return the methods + * directly as with the other plugins. At that point we will also remove the + * legacy workarounds and deprecations. + * + * See the plan at + * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 + */ - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters +function restEndpointMethods(octokit) { + return endpointsToMethods(octokit, Endpoints); +} +restEndpointMethods.VERSION = VERSION; + +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set +/***/ }), +/***/ 537: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string +"use strict"; - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present +Object.defineProperty(exports, "__esModule", ({ value: true })); +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? { - request: options.request - } : null); -} +var deprecation = __webpack_require__(8932); +var once = _interopDefault(__webpack_require__(1223)); -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) -const VERSION = "6.0.3"; + /* istanbul ignore next */ -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. 
+ if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; + } + + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; } -}; -const endpoint = withDefaults(null, DEFAULTS); +} -exports.endpoint = endpoint; +exports.RequestError = RequestError; //# sourceMappingURL=index.js.map /***/ }), -/***/ 443: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/***/ 6234: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -const url = __webpack_require__(835); -function getProxyUrl(reqUrl) { - let usingSsl = reqUrl.protocol === 'https:'; - let proxyUrl; - if (checkBypass(reqUrl)) { - return proxyUrl; - } - let proxyVar; - if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; - } - else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; - } - if (proxyVar) { - proxyUrl = url.parse(proxyVar); - } - return proxyUrl; + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var endpoint = __webpack_require__(9440); +var universalUserAgent = __webpack_require__(5030); +var isPlainObject = _interopDefault(__webpack_require__(8840)); +var nodeFetch = _interopDefault(__webpack_require__(467)); +var requestError = __webpack_require__(537); + +const VERSION = "5.4.5"; + +function getBufferResponse(response) { + return response.arrayBuffer(); } -exports.getProxyUrl = getProxyUrl; -function checkBypass(reqUrl) { - if (!reqUrl.hostname) { - return false; - } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; - if (!noProxy) { - return false; - } - // Determine the request port - let reqPort; - if (reqUrl.port) { - reqPort = Number(reqUrl.port); - } - else if (reqUrl.protocol === 'http:') { - reqPort = 80; - } - else if (reqUrl.protocol === 'https:') { - reqPort = 443; - } - // Format the request hostname and hostname with port - let upperReqHosts = [reqUrl.hostname.toUpperCase()]; - if (typeof reqPort === 'number') { - upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); - } - // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { - if (upperReqHosts.some(x => x === upperNoProxyItem)) { - return true; - } + +function fetchWrapper(requestOptions) { + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; } - return false; -} -exports.checkBypass = checkBypass; + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests -/***/ }), -/***/ 447: -/***/ (function(module, __unusedexports, __webpack_require__) { + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } -"use strict"; + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } -const path = __webpack_require__(622); -const childProcess = __webpack_require__(129); -const crossSpawn = __webpack_require__(746); -const stripEof = __webpack_require__(774); -const npmRunPath = __webpack_require__(502); -const isStream = __webpack_require__(554); -const _getStream = __webpack_require__(766); -const pFinally = __webpack_require__(330); -const onExit = __webpack_require__(931); -const errname = __webpack_require__(689); -const stdio = __webpack_require__(166); + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } -const TEN_MEGABYTES = 1000 * 1000 * 10; + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); -function handleArgs(cmd, args, opts) { - let parsed; + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format - opts = Object.assign({ - extendEnv: 
true, - env: {} - }, opts); + error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } - if (opts.extendEnv) { - opts.env = Object.assign({}, process.env, opts.env); - } + throw error; + }); + } - if (opts.__winShell === true) { - delete opts.__winShell; - parsed = { - command: cmd, - args, - options: opts, - file: cmd, - original: { - cmd, - args - } - }; - } else { - parsed = crossSpawn._parse(cmd, args, opts); - } + const contentType = response.headers.get("content-type"); - opts = Object.assign({ - maxBuffer: TEN_MEGABYTES, - buffer: true, - stripEof: true, - preferLocal: true, - localDir: parsed.options.cwd || process.cwd(), - encoding: 'utf8', - reject: true, - cleanup: true - }, parsed.options); + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; + } - opts.stdio = stdio(opts); + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} - if (opts.preferLocal) { - opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); - } +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); - if (opts.detached) { - // #115 - opts.cleanup = false; - } + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); - if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { - // #116 - parsed.args.unshift('/q'); - } + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } - return { - cmd: parsed.command, - args: parsed.args, - opts, - parsed - }; -} + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; -function handleInput(spawned, input) { - if (input === null || input === undefined) { - return; - } + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; - if (isStream(input)) { - input.pipe(spawned.stdin); - } else { - spawned.stdin.end(input); - } + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); } -function handleOutput(opts, val) { - if (val && opts.stripEof) { - val = stripEof(val); - } - - return val; -} +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); -function handleShell(fn, cmd, opts) { - let file = '/bin/sh'; - let args = ['-c', cmd]; +exports.request = request; +//# sourceMappingURL=index.js.map - opts = Object.assign({}, opts); - if (process.platform === 'win32') { - opts.__winShell = true; - file = process.env.comspec || 'cmd.exe'; - args = ['/s', '/c', `"${cmd}"`]; - opts.windowsVerbatimArguments = true; - } +/***/ }), - if (opts.shell) { - file = opts.shell; - delete opts.shell; - } +/***/ 3682: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - return fn(file, args, opts); -} +var register = __webpack_require__(4670) +var addHook = __webpack_require__(5549) +var removeHook = 
__webpack_require__(6819) -function getStream(process, stream, {encoding, buffer, maxBuffer}) { - if (!process[stream]) { - return null; - } +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind +var bindable = bind.bind(bind) - let ret; +function bindApi (hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) + hook.api = { remove: removeHookRef } + hook.remove = removeHookRef - if (!buffer) { - // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 - ret = new Promise((resolve, reject) => { - process[stream] - .once('end', resolve) - .once('error', reject); - }); - } else if (encoding) { - ret = _getStream(process[stream], { - encoding, - maxBuffer - }); - } else { - ret = _getStream.buffer(process[stream], {maxBuffer}); - } + ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { + var args = name ? [state, kind, name] : [state, kind] + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) + }) +} - return ret.catch(err => { - err.stream = stream; - err.message = `${stream} ${err.message}`; - throw err; - }); +function HookSingular () { + var singularHookName = 'h' + var singularHookState = { + registry: {} + } + var singularHook = register.bind(null, singularHookState, singularHookName) + bindApi(singularHook, singularHookState, singularHookName) + return singularHook } -function makeError(result, options) { - const {stdout, stderr} = result; +function HookCollection () { + var state = { + registry: {} + } - let err = result.error; - const {code, signal} = result; + var hook = register.bind(null, state) + bindApi(hook, state) - const {parsed, joinedCmd} = options; - const timedOut = options.timedOut || false; + return hook +} - if (!err) { - let output = ''; +var collectionHookDeprecationMessageDisplayed = false +function Hook () { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') + collectionHookDeprecationMessageDisplayed = true + } + return HookCollection() +} - if (Array.isArray(parsed.opts.stdio)) { - if (parsed.opts.stdio[2] !== 'inherit') { - output += output.length > 0 ? stderr : `\n${stderr}`; - } +Hook.Singular = HookSingular.bind() +Hook.Collection = HookCollection.bind() - if (parsed.opts.stdio[1] !== 'inherit') { - output += `\n${stdout}`; - } - } else if (parsed.opts.stdio !== 'inherit') { - output = `\n${stderr}${stdout}`; - } +module.exports = Hook +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook +module.exports.Singular = Hook.Singular +module.exports.Collection = Hook.Collection - err = new Error(`Command failed: ${joinedCmd}${output}`); - err.code = code < 0 ? 
errname(code) : code; - } - err.stdout = stdout; - err.stderr = stderr; - err.failed = true; - err.signal = signal || null; - err.cmd = joinedCmd; - err.timedOut = timedOut; +/***/ }), - return err; -} +/***/ 5549: +/***/ ((module) => { -function joinCmd(cmd, args) { - let joinedCmd = cmd; +module.exports = addHook - if (Array.isArray(args) && args.length > 0) { - joinedCmd += ' ' + args.join(' '); - } +function addHook (state, kind, name, hook) { + var orig = hook + if (!state.registry[name]) { + state.registry[name] = [] + } - return joinedCmd; -} + if (kind === 'before') { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)) + } + } -module.exports = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const {encoding, buffer, maxBuffer} = parsed.opts; - const joinedCmd = joinCmd(cmd, args); + if (kind === 'after') { + hook = function (method, options) { + var result + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_ + return orig(result, options) + }) + .then(function () { + return result + }) + } + } - let spawned; - try { - spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); - } catch (err) { - return Promise.reject(err); - } + if (kind === 'error') { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options) + }) + } + } - let removeExitHandler; - if (parsed.opts.cleanup) { - removeExitHandler = onExit(() => { - spawned.kill(); - }); - } + state.registry[name].push({ + hook: hook, + orig: orig + }) +} - let timeoutId = null; - let timedOut = false; - const cleanup = () => { - if (timeoutId) { - clearTimeout(timeoutId); - timeoutId = null; - } +/***/ }), - if (removeExitHandler) { - removeExitHandler(); - } - }; +/***/ 4670: +/***/ ((module) => { - if (parsed.opts.timeout > 0) { - timeoutId = setTimeout(() => { - timeoutId = null; - timedOut = true; - spawned.kill(parsed.opts.killSignal); - }, parsed.opts.timeout); - } +module.exports = register - const processDone = new Promise(resolve => { - spawned.on('exit', (code, signal) => { - cleanup(); - resolve({code, signal}); - }); +function register (state, name, method, options) { + if (typeof method !== 'function') { + throw new Error('method for before hook must be a function') + } - spawned.on('error', err => { - cleanup(); - resolve({error: err}); - }); + if (!options) { + options = {} + } - if (spawned.stdin) { - spawned.stdin.on('error', err => { - cleanup(); - resolve({error: err}); - }); - } - }); + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options) + }, method)() + } - function destroy() { - if (spawned.stdout) { - spawned.stdout.destroy(); - } + return Promise.resolve() + .then(function () { + if (!state.registry[name]) { + return method(options) + } - if (spawned.stderr) { - spawned.stderr.destroy(); - } - } + return (state.registry[name]).reduce(function (method, registered) { + return registered.hook.bind(null, method, options) + }, method)() + }) +} - const handlePromise = () => pFinally(Promise.all([ - processDone, - getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), - getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) - ]).then(arr => { - const result = arr[0]; - result.stdout = arr[1]; - result.stderr = arr[2]; - if (result.error || 
result.code !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed, - timedOut - }); +/***/ }), - // TODO: missing some timeout logic for killed - // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 - // err.killed = spawned.killed || killed; - err.killed = err.killed || spawned.killed; +/***/ 6819: +/***/ ((module) => { - if (!parsed.opts.reject) { - return err; - } +module.exports = removeHook - throw err; - } +function removeHook (state, name, method) { + if (!state.registry[name]) { + return + } - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - killed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; - }), destroy); + var index = state.registry[name] + .map(function (registered) { return registered.orig }) + .indexOf(method) - crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); + if (index === -1) { + return + } - handleInput(spawned, parsed.opts.input); + state.registry[name].splice(index, 1) +} - spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); - spawned.catch = onrejected => handlePromise().catch(onrejected); - return spawned; -}; +/***/ }), -// TODO: set `stderr: 'ignore'` when that option is implemented -module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); +/***/ 2746: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -// TODO: set `stdout: 'ignore'` when that option is implemented -module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); +"use strict"; -module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); -module.exports.sync = (cmd, args, opts) => { - const parsed = handleArgs(cmd, args, opts); - const joinedCmd = joinCmd(cmd, args); +const cp = __webpack_require__(3129); +const parse = __webpack_require__(6855); +const enoent = __webpack_require__(4101); - if (isStream(parsed.opts.input)) { - throw new TypeError('The `input` option cannot be a stream in sync mode'); - } +function spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); - const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); - result.code = result.status; + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); - if (result.error || result.status !== 0 || result.signal !== null) { - const err = makeError(result, { - joinedCmd, - parsed - }); + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); - if (!parsed.opts.reject) { - return err; - } + return spawned; +} - throw err; - } +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); - return { - stdout: handleOutput(parsed.opts, result.stdout), - stderr: handleOutput(parsed.opts, result.stderr), - code: 0, - failed: false, - signal: null, - cmd: joinedCmd, - timedOut: false - }; -}; + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); -module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || 
enoent.verifyENOENTSync(result.status, parsed); + + return result; +} + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; /***/ }), -/***/ 467: -/***/ (function(module, exports, __webpack_require__) { +/***/ 4101: +/***/ ((module) => { "use strict"; -Object.defineProperty(exports, '__esModule', { value: true }); +const isWin = process.platform === 'win32'; -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } +function notFoundError(original, syscall) { + return Object.assign(new Error(`${syscall} ${original.command} ENOENT`), { + code: 'ENOENT', + errno: 'ENOENT', + syscall: `${syscall} ${original.command}`, + path: original.command, + spawnargs: original.args, + }); +} -var Stream = _interopDefault(__webpack_require__(413)); -var http = _interopDefault(__webpack_require__(605)); -var Url = _interopDefault(__webpack_require__(835)); -var https = _interopDefault(__webpack_require__(211)); -var zlib = _interopDefault(__webpack_require__(761)); +function hookChildProcess(cp, parsed) { + if (!isWin) { + return; + } -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + const originalEmit = cp.emit; -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; + cp.emit = function (name, arg1) { + // If emitting "exit" event and exit code is 1, we need to check if + // the command exists and emit an "error" instead + // See https://github.com/IndigoUnited/node-cross-spawn/issues/16 + if (name === 'exit') { + const err = verifyENOENT(arg1, parsed, 'spawn'); -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); + if (err) { + return originalEmit.call(cp, 'error', err); + } + } -class Blob { - constructor() { - this[TYPE] = ''; + return originalEmit.apply(cp, arguments); // eslint-disable-line prefer-rest-params + }; +} - const blobParts = arguments[0]; - const options = arguments[1]; +function verifyENOENT(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawn'); + } - const buffers = []; - let size = 0; + return null; +} - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } +function verifyENOENTSync(status, parsed) { + if (isWin && status === 1 && !parsed.file) { + return notFoundError(parsed.original, 'spawnSync'); + } - this[BUFFER] = Buffer.concat(buffers); + return null; +} - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; +module.exports = { + hookChildProcess, + verifyENOENT, + verifyENOENTSync, + notFoundError, +}; - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} +/***/ }), -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); +/***/ 6855: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); +"use strict"; -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); +const path = __webpack_require__(5622); +const niceTry = __webpack_require__(8560); +const resolveCommand = __webpack_require__(7274); +const escape = __webpack_require__(4274); +const readShebang = __webpack_require__(1252); +const semver = __webpack_require__(5911); - this.message = message; - this.type = type; +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } +// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 +const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); +function detectShebang(parsed) { + 
parsed.file = resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; } -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } -let convert; -try { - convert = __webpack_require__(877).convert; -} catch (e) {} + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); -const INTERNALS = Symbol('Body internals'); + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; + const shellCommand = [parsed.command].concat(parsed.args).join(' '); - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } + return parsed; } -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, +function parseShell(parsed) { + // If node supports the shell option, there's no need to mimic its behavior + if (supportsShellOption) { + return parsed; + } - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, + // Mimic node shell option + // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 + const shellCommand = [parsed.command].concat(parsed.args).join(' '); - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, + if (isWin) { + parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } else { + if (typeof parsed.options.shell === 'string') { + parsed.command = parsed.options.shell; + } else if (process.platform === 'android') { + parsed.command = '/system/bin/sh'; + } else { + parsed.command = '/bin/sh'; + } - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; + parsed.args = ['-c', shellCommand]; + } - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, + return parsed; +} - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; + // Delegate further parsing to shell or non-shell + return options.shell ? parseShell(parsed) : parseNonShell(parsed); +} -// In browsers, all properties are enumerable. 
-Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); +module.exports = parse; -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; -/** - * Consume and convert an entire Body to a Buffer. - * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; +/***/ }), - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } +/***/ 4274: +/***/ ((module) => { - this[INTERNALS].disturbed = true; +"use strict"; - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - let body = this.body; +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); - // body is blob - if (isBlob(body)) { - body = body.stream(); - } + return arg; +} - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } + // Algorithm below is based on https://qntm.org/cmd - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(\\*)"/g, '$1$1\\"'); - return new Body.Promise(function (resolve, reject) { - let resTimeout; + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(\\*)$/, '$1$1'); - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } + // All other backslashes occur literally - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); + // Quote the whole thing: + arg = `"${arg}"`; - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + 
arg = arg.replace(metaCharsRegExp, '^$1'); + } - accumBytes += chunk.length; - accum.push(chunk); - }); + return arg; +} - body.on('end', function () { - if (abort) { - return; - } +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; - clearTimeout(resTimeout); - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} +/***/ }), -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } +/***/ 1252: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; +"use strict"; - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); +const fs = __webpack_require__(5747); +const shebangCommand = __webpack_require__(7032); - // html5 - if (!res && str) { - res = / { -/** - * Check if `obj` is a W3C `Blob` object (which `File` inherits from) - * @param {*} obj - * @return {boolean} - */ -function isBlob(obj) { - return typeof obj === 'object' && typeof obj.arrayBuffer === 'function' && typeof obj.type === 'string' && typeof obj.stream === 'function' && typeof obj.constructor === 'function' && typeof obj.constructor.name === 'string' && /^(Blob|File)$/.test(obj.constructor.name) && /^(Blob|File)$/.test(obj[Symbol.toStringTag]); -} +"use strict"; -/** - * Clone body given Res/Req instance - * - * @param Mixed instance Response or Request instance - * @return Mixed - */ -function clone(instance) { - let p1, p2; - let body = instance.body; - // don't allow cloning a used body - if (instance.bodyUsed) { - throw new Error('cannot clone body after it is used'); - } +const path = __webpack_require__(5622); +const which = __webpack_require__(4207); +const pathKey = __webpack_require__(539)(); - // check that body is a stream and not form-data object - // note: we can't clone the form-data object without having it as a dependency - if (body instanceof Stream && typeof body.getBoundary !== 'function') { - // tee instance body - p1 = new PassThrough(); - p2 = new PassThrough(); - body.pipe(p1); - body.pipe(p2); - // set instance body to teed body and return the other teed body - instance[INTERNALS].body = p1; - body = p2; - } +function resolveCommandAttempt(parsed, withoutPathExt) { + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; - return body; -} + // If a custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (hasCustomCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } + } -/** - * Performs the operation "extract a `Content-Type` value from |object|" as - * specified in the specification: - * https://fetch.spec.whatwg.org/#concept-bodyinit-extract - * - * This function assumes that instance.body is 
present. - * - * @param Mixed instance Any options.body input - */ -function extractContentType(body) { - if (body === null) { - // body is null - return null; - } else if (typeof body === 'string') { - // body is string - return 'text/plain;charset=UTF-8'; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - return 'application/x-www-form-urlencoded;charset=UTF-8'; - } else if (isBlob(body)) { - // body is blob - return body.type || null; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return null; - } else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - return null; - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - return null; - } else if (typeof body.getBoundary === 'function') { - // detect form data input from form-data module - return `multipart/form-data;boundary=${body.getBoundary()}`; - } else if (body instanceof Stream) { - // body is stream - // can't really do much about this - return null; - } else { - // Body constructor defaults other things to string - return 'text/plain;charset=UTF-8'; - } -} + let resolved; -/** - * The Fetch Standard treats this as if "total bytes" is a property on the body. - * For us, we have to explicitly get it with a function. - * - * ref: https://fetch.spec.whatwg.org/#concept-body-total-bytes - * - * @param Body instance Instance of Body - * @return Number? Number of bytes, or null if not possible - */ -function getTotalBytes(instance) { - const body = instance.body; + try { + resolved = which.sync(parsed.command, { + path: (parsed.options.env || process.env)[pathKey], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + process.chdir(cwd); + } + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved); + } - if (body === null) { - // body is null - return 0; - } else if (isBlob(body)) { - return body.size; - } else if (Buffer.isBuffer(body)) { - // body is buffer - return body.length; - } else if (body && typeof body.getLengthSync === 'function') { - // detect form data input from form-data module - if (body._lengthRetrievers && body._lengthRetrievers.length == 0 || // 1.x - body.hasKnownLength && body.hasKnownLength()) { - // 2.x - return body.getLengthSync(); - } - return null; - } else { - // body is stream - return null; - } + return resolved; } -/** - * Write a Body to a Node.js WritableStream (e.g. http.Request) object. 
- * - * @param Body instance Instance of Body - * @return Void - */ -function writeToStream(dest, instance) { - const body = instance.body; +function resolveCommand(parsed) { + return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true); +} +module.exports = resolveCommand; - if (body === null) { - // body is null - dest.end(); - } else if (isBlob(body)) { - body.stream().pipe(dest); - } else if (Buffer.isBuffer(body)) { - // body is buffer - dest.write(body); - dest.end(); - } else { - // body is stream - body.pipe(dest); - } -} -// expose Promise -Body.Promise = global.Promise; +/***/ }), -/** - * headers.js - * - * Headers class offers convenient helpers - */ +/***/ 8932: +/***/ ((__unused_webpack_module, exports) => { -const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; -const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; +"use strict"; -function validateName(name) { - name = `${name}`; - if (invalidTokenRegex.test(name) || name === '') { - throw new TypeError(`${name} is not a legal HTTP header name`); - } -} -function validateValue(value) { - value = `${value}`; - if (invalidHeaderCharRegex.test(value)) { - throw new TypeError(`${value} is not a legal HTTP header value`); - } -} +Object.defineProperty(exports, "__esModule", ({ value: true })); -/** - * Find the key in the map object given a header name. - * - * Returns undefined if not found. - * - * @param String name Header name - * @return String|Undefined - */ -function find(map, name) { - name = name.toLowerCase(); - for (const key in map) { - if (key.toLowerCase() === name) { - return key; - } - } - return undefined; -} +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) -const MAP = Symbol('map'); -class Headers { - /** - * Headers class - * - * @param Object headers Response headers - * @return Void - */ - constructor() { - let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; + /* istanbul ignore next */ - this[MAP] = Object.create(null); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); + this.name = 'Deprecation'; + } - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } +} - return; - } +exports.Deprecation = Deprecation; - // We don't worry about converting prop to ByteString here as append() - // will handle it. 
- if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } +/***/ }), - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } +/***/ 1205: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } +var once = __webpack_require__(1223); - return this[MAP][key].join(', '); - } +var noop = function() {}; - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } + callback = once(callback || noop); - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } + var onexit = function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } + var onerror = function(err) { + callback.call(stream, err); + }; - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + var onclose = function() { + process.nextTick(onclosenexttick); + }; -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); + var onrequest = function() { + stream.req.on('finish', onfinish); + }; -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : 'key+value'; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} + if (isChildProcess(stream)) stream.on('exit', onexit); -const INTERNAL = Symbol('internal'); + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); }; - return iterator; -} +}; -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } +module.exports = eos; - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } +/***/ }), - this[INTERNAL].index = index + 1; +/***/ 5447: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); +"use strict"; -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); +const path = __webpack_require__(5622); +const childProcess = __webpack_require__(3129); +const crossSpawn = __webpack_require__(2746); +const stripEof = __webpack_require__(5515); +const npmRunPath = __webpack_require__(502); +const isStream = __webpack_require__(1554); +const _getStream = __webpack_require__(1766); +const pFinally = __webpack_require__(1330); +const onExit = __webpack_require__(4931); +const errname = __webpack_require__(4689); +const stdio = __webpack_require__(166); -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); +const TEN_MEGABYTES = 1000 * 1000 * 10; - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. 
- const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } +function handleArgs(cmd, args, opts) { + let parsed; - return obj; -} + opts = Object.assign({ + extendEnv: true, + env: {} + }, opts); -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } + if (opts.extendEnv) { + opts.env = Object.assign({}, process.env, opts.env); + } + + if (opts.__winShell === true) { + delete opts.__winShell; + parsed = { + command: cmd, + args, + options: opts, + file: cmd, + original: { + cmd, + args } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } + }; + } else { + parsed = crossSpawn._parse(cmd, args, opts); } - return headers; -} -const INTERNALS$1 = Symbol('Response internals'); + opts = Object.assign({ + maxBuffer: TEN_MEGABYTES, + buffer: true, + stripEof: true, + preferLocal: true, + localDir: parsed.options.cwd || process.cwd(), + encoding: 'utf8', + reject: true, + cleanup: true + }, parsed.options); -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; + opts.stdio = stdio(opts); -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + if (opts.preferLocal) { + opts.env = npmRunPath.env(Object.assign({}, opts, {cwd: opts.localDir})); + } - Body.call(this, body, opts); + if (opts.detached) { + // #115 + opts.cleanup = false; + } - const status = opts.status || 200; - const headers = new Headers(opts.headers); + if (process.platform === 'win32' && path.basename(parsed.command) === 'cmd.exe') { + // #116 + parsed.args.unshift('/q'); + } - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } + return { + cmd: parsed.command, + args: parsed.args, + opts, + parsed + }; +} - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; +function handleInput(spawned, input) { + if (input === null || input === undefined) { + return; } - get url() { - return this[INTERNALS$1].url || ''; + if (isStream(input)) { + input.pipe(spawned.stdin); + } else { + spawned.stdin.end(input); } +} - get status() { - return this[INTERNALS$1].status; +function handleOutput(opts, val) { + if (val && opts.stripEof) { + val = stripEof(val); } - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + return val; +} + +function handleShell(fn, cmd, opts) { + let file = '/bin/sh'; + let args = ['-c', cmd]; + + opts = Object.assign({}, opts); + + if (process.platform === 'win32') { + opts.__winShell = true; + file = process.env.comspec || 'cmd.exe'; + args = ['/s', '/c', `"${cmd}"`]; + opts.windowsVerbatimArguments = true; } - get redirected() { - return this[INTERNALS$1].counter > 0; + if (opts.shell) { + file = opts.shell; + delete opts.shell; } - get statusText() { - return this[INTERNALS$1].statusText; + return fn(file, args, opts); +} + +function getStream(process, stream, {encoding, buffer, maxBuffer}) { + if (!process[stream]) { + return null; } - get headers() { - return this[INTERNALS$1].headers; - } + let ret; - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected + if (!buffer) { + // TODO: Use `ret = util.promisify(stream.finished)(process[stream]);` when targeting Node.js 10 + ret = new Promise((resolve, reject) => { + process[stream] + .once('end', resolve) + .once('error', reject); + }); + } else if (encoding) { + ret = _getStream(process[stream], { + encoding, + maxBuffer }); + } else { + ret = _getStream.buffer(process[stream], {maxBuffer}); } + + return ret.catch(err => { + err.stream = stream; + err.message = `${stream} ${err.message}`; + throw err; + }); } -Body.mixIn(Response.prototype); +function makeError(result, options) { + const {stdout, stderr} = result; -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); + let err = result.error; + const {code, signal} = result; -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); + const {parsed, joinedCmd} = options; + const timedOut = options.timedOut 
|| false; -const INTERNALS$2 = Symbol('Request internals'); + if (!err) { + let output = ''; -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; + if (Array.isArray(parsed.opts.stdio)) { + if (parsed.opts.stdio[2] !== 'inherit') { + output += output.length > 0 ? stderr : `\n${stderr}`; + } -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + if (parsed.opts.stdio[1] !== 'inherit') { + output += `\n${stdout}`; + } + } else if (parsed.opts.stdio !== 'inherit') { + output = `\n${stderr}${stdout}`; + } -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; + err = new Error(`Command failed: ${joinedCmd}${output}`); + err.code = code < 0 ? errname(code) : code; + } + + err.stdout = stdout; + err.stderr = stderr; + err.failed = true; + err.signal = signal || null; + err.cmd = joinedCmd; + err.timedOut = timedOut; + + return err; } -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); +function joinCmd(cmd, args) { + let joinedCmd = cmd; + + if (Array.isArray(args) && args.length > 0) { + joinedCmd += ' ' + args.join(' '); + } + + return joinedCmd; } -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; +module.exports = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const {encoding, buffer, maxBuffer} = parsed.opts; + const joinedCmd = joinCmd(cmd, args); - let parsedURL; + let spawned; + try { + spawned = childProcess.spawn(parsed.cmd, parsed.args, parsed.opts); + } catch (err) { + return Promise.reject(err); + } - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } + let removeExitHandler; + if (parsed.opts.cleanup) { + removeExitHandler = onExit(() => { + spawned.kill(); + }); + } - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); + let timeoutId = null; + let timedOut = false; - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); + const cleanup = () => { + if (timeoutId) { + clearTimeout(timeoutId); + timeoutId = null; } - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; + if (removeExitHandler) { + removeExitHandler(); + } + }; - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 + if (parsed.opts.timeout > 0) { + timeoutId = setTimeout(() => { + timeoutId = null; + timedOut = true; + spawned.kill(parsed.opts.killSignal); + }, parsed.opts.timeout); + } + + const processDone = new Promise(resolve => { + spawned.on('exit', (code, signal) => { + cleanup(); + resolve({code, signal}); }); - const headers = new Headers(init.headers || input.headers || {}); + spawned.on('error', err => { + cleanup(); + resolve({error: err}); + }); - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } + if (spawned.stdin) { + spawned.stdin.on('error', err => { + cleanup(); + resolve({error: err}); + }); } + }); - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; + function destroy() { + if (spawned.stdout) { + spawned.stdout.destroy(); + } - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); + if (spawned.stderr) { + spawned.stderr.destroy(); } + } - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; + const handlePromise = () => pFinally(Promise.all([ + processDone, + getStream(spawned, 'stdout', {encoding, buffer, maxBuffer}), + getStream(spawned, 'stderr', {encoding, buffer, maxBuffer}) + ]).then(arr => { + const result = arr[0]; + result.stdout = arr[1]; + result.stderr = arr[2]; - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } + if (result.error || result.code !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed, + timedOut + }); - get method() { - return this[INTERNALS$2].method; - } + // TODO: missing some timeout logic for killed + // https://github.com/nodejs/node/blob/master/lib/child_process.js#L203 + // err.killed = spawned.killed || killed; + err.killed = err.killed || spawned.killed; - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } + if (!parsed.opts.reject) { + return err; + } - get headers() { - return this[INTERNALS$2].headers; - } + throw err; + } + + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + killed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; + }), destroy); - get redirect() { - return this[INTERNALS$2].redirect; - } + crossSpawn._enoent.hookChildProcess(spawned, parsed.parsed); - get signal() { - return this[INTERNALS$2].signal; - } + handleInput(spawned, parsed.opts.input); - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} + spawned.then = (onfulfilled, onrejected) => handlePromise().then(onfulfilled, onrejected); + spawned.catch = onrejected => handlePromise().catch(onrejected); -Body.mixIn(Request.prototype); + return spawned; +}; -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); +// TODO: set `stderr: 'ignore'` when that option is implemented +module.exports.stdout = (...args) => module.exports(...args).then(x => x.stdout); -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); +// TODO: set `stdout: 'ignore'` when that option is implemented +module.exports.stderr = (...args) => module.exports(...args).then(x => x.stderr); -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); +module.exports.shell = (cmd, opts) => handleShell(module.exports, cmd, opts); - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } +module.exports.sync = (cmd, args, opts) => { + const parsed = handleArgs(cmd, args, opts); + const joinedCmd = joinCmd(cmd, args); - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); + if (isStream(parsed.opts.input)) { + throw new TypeError('The `input` option cannot be a stream in sync mode'); } - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } + const result = childProcess.spawnSync(parsed.cmd, parsed.args, parsed.opts); + result.code = result.status; - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } + if (result.error || result.status !== 0 || result.signal !== null) { + const err = makeError(result, { + joinedCmd, + parsed + }); - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); + if (!parsed.opts.reject) { + return err; } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + throw err; } - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } + return { + stdout: handleOutput(parsed.opts, result.stdout), + stderr: handleOutput(parsed.opts, result.stderr), + code: 0, + failed: false, + signal: null, + cmd: joinedCmd, + timedOut: false + }; +}; - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } +module.exports.shellSync = (cmd, opts) => handleShell(module.exports.sync, cmd, opts); - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js +/***/ }), - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} +/***/ 4689: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ +"use strict"; -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); +// Older verions of Node.js might not have `util.getSystemErrorName()`. +// In that case, fall back to a deprecated internal. 
+const util = __webpack_require__(1669); - this.type = 'aborted'; - this.message = message; +let uv; - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} +if (typeof util.getSystemErrorName === 'function') { + module.exports = util.getSystemErrorName; +} else { + try { + uv = process.binding('uv'); -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; + if (typeof uv.errname !== 'function') { + throw new TypeError('uv.errname is not a function'); + } + } catch (err) { + console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); + uv = null; + } -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; + module.exports = code => errname(uv, code); +} -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { +// Used for testing the fallback behavior +module.exports.__test__ = errname; - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); +function errname(uv, code) { + if (uv) { + return uv.errname(code); } - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); + if (!(code < 0)) { + throw new Error('err >= 0'); + } - const send = (options.protocol === 'https:' ? https : http).request; - const signal = request.signal; + return `Unknown system error ${code}`; +} - let response = null; - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - if (signal && signal.aborted) { - abort(); - return; - } +/***/ }), - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; +/***/ 166: +/***/ ((module) => { - // send request - const req = send(options); - let reqTimeout; +"use strict"; - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } +const alias = ['stdin', 'stdout', 'stderr']; - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } +const hasAlias = opts => alias.some(x => Boolean(opts[x])); - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } +module.exports = opts => { + if (!opts) { + return null; + } - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); + if (opts.stdio && hasAlias(opts)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); + } - req.on('response', function (res) { - clearTimeout(reqTimeout); + if (typeof opts.stdio === 'string') { + return opts.stdio; + } - const headers = 
createHeadersLenient(res.headers); + const stdio = opts.stdio || []; - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); + const result = []; + const len = Math.max(stdio.length, alias.length); - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } + for (let i = 0; i < len; i++) { + let value = null; - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } + if (stdio[i] !== undefined) { + value = stdio[i]; + } else if (opts[alias[i]] !== undefined) { + value = opts[alias[i]]; + } - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. - const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; + result[i] = value; + } - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } + return result; +}; - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } +/***/ }), - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); +/***/ 1585: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; +"use strict"; - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); +const {PassThrough} = __webpack_require__(2413); - // HTTP-network fetch step 12.1.1.4: handle content codings +module.exports = options => { + options = Object.assign({}, options); - // in following scenarios we ignore compression support - // 1. 
compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } + const {array} = options; + let {encoding} = options; + const buffer = encoding === 'buffer'; + let objectMode = false; - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. - const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } + if (buffer) { + encoding = null; + } - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } + let len = 0; + const ret = []; + const stream = new PassThrough({objectMode}); - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } + if (encoding) { + stream.setEncoding(encoding); + } - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); + stream.on('data', chunk => { + ret.push(chunk); - writeToStream(req, request); + if (objectMode) { + len = ret.length; + } else { + len += chunk.length; + } }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; -// expose Promise -fetch.Promise = global.Promise; + stream.getBufferedValue = () => { + if (array) { + return ret; + } -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; + return buffer ? 
Buffer.concat(ret, len) : ret.join(''); + }; + + stream.getBufferedLength = () => len; + + return stream; +}; /***/ }), -/***/ 493: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 1766: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; -const os = __webpack_require__(87); +const pump = __webpack_require__(8341); +const bufferStream = __webpack_require__(1585); -const nameMap = new Map([ - [20, ['Big Sur', '11']], - [19, ['Catalina', '10.15']], - [18, ['Mojave', '10.14']], - [17, ['High Sierra', '10.13']], - [16, ['Sierra', '10.12']], - [15, ['El Capitan', '10.11']], - [14, ['Yosemite', '10.10']], - [13, ['Mavericks', '10.9']], - [12, ['Mountain Lion', '10.8']], - [11, ['Lion', '10.7']], - [10, ['Snow Leopard', '10.6']], - [9, ['Leopard', '10.5']], - [8, ['Tiger', '10.4']], - [7, ['Panther', '10.3']], - [6, ['Jaguar', '10.2']], - [5, ['Puma', '10.1']] -]); +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } + + options = Object.assign({maxBuffer: Infinity}, options); + + const {maxBuffer} = options; -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + reject(error); + }; - const [name, version] = nameMap.get(release); + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } - return { - name, - version - }; -}; + resolve(); + }); -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }).then(() => stream.getBufferedValue()); +} + +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; /***/ }), -/***/ 502: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 8840: +/***/ ((module) => { "use strict"; -const path = __webpack_require__(622); -const pathKey = __webpack_require__(539); - -module.exports = opts => { - opts = Object.assign({ - cwd: process.cwd(), - path: process.env[pathKey()] - }, opts); - - let prev; - let pth = path.resolve(opts.cwd); - const ret = []; - while (prev !== pth) { - ret.push(path.join(pth, 'node_modules/.bin')); - prev = pth; - pth = path.resolve(pth, '..'); - } +/*! + * isobject + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. + */ - // ensure the running `node` binary is used - ret.push(path.dirname(process.execPath)); +function isObject(val) { + return val != null && typeof val === 'object' && Array.isArray(val) === false; +} - return ret.concat(opts.path).join(path.delimiter); -}; +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ -module.exports.env = opts => { - opts = Object.assign({ - env: process.env - }, opts); +function isObjectObject(o) { + return isObject(o) === true + && Object.prototype.toString.call(o) === '[object Object]'; +} - const env = Object.assign({}, opts.env); - const path = pathKey({env}); +function isPlainObject(o) { + var ctor,prot; - opts.path = env[path]; - env[path] = module.exports(opts); + if (isObjectObject(o) === false) return false; - return env; -}; + // If has modified constructor + ctor = o.constructor; + if (typeof ctor !== 'function') return false; + // If has modified prototype + prot = ctor.prototype; + if (isObjectObject(prot) === false) return false; -/***/ }), + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } -/***/ 515: -/***/ (function(module, __unusedexports, __webpack_require__) { + // Most likely a plain Object + return true; +} -"use strict"; +module.exports = isPlainObject; -const os = __webpack_require__(87); -const execa = __webpack_require__(447); -// Reference: https://www.gaijin.at/en/lstwinver.php -const names = new Map([ - ['10.0', '10'], - ['6.3', '8.1'], - ['6.2', '8'], - ['6.1', '7'], - ['6.0', 'Vista'], - ['5.2', 'Server 2003'], - ['5.1', 'XP'], - ['5.0', '2000'], - ['4.9', 'ME'], - ['4.1', '98'], - ['4.0', '95'] -]); +/***/ }), -const windowsRelease = release => { - const version = /\d+\.\d/.exec(release || os.release()); +/***/ 1554: +/***/ ((module) => { - if (release && !version) { - throw new Error('`release` argument doesn\'t match `n.n`'); - } +"use strict"; - const ver = (version || [])[0]; - // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime. - // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version - // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx - // If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead. - // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name. 
- if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { - let stdout; - try { - stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; - } catch (_) { - stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || ''; - } +var isStream = module.exports = function (stream) { + return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; +}; - const year = (stdout.match(/2008|2012|2016|2019/) || [])[0]; +isStream.writable = function (stream) { + return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; +}; - if (year) { - return `Server ${year}`; - } - } +isStream.readable = function (stream) { + return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; +}; - return names.get(ver); +isStream.duplex = function (stream) { + return isStream.writable(stream) && isStream.readable(stream); }; -module.exports = windowsRelease; +isStream.transform = function (stream) { + return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; +}; /***/ }), -/***/ 537: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - +/***/ 7126: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -Object.defineProperty(exports, '__esModule', { value: true }); +var fs = __webpack_require__(5747) +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = __webpack_require__(2001) +} else { + core = __webpack_require__(9728) +} -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } +module.exports = isexe +isexe.sync = sync -var deprecation = __webpack_require__(932); -var once = _interopDefault(__webpack_require__(223)); +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } - /* istanbul ignore next */ + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) + }) +} - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er } + } +} - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options +/***/ }), - const requestCopy = Object.assign({}, options.request); +/***/ 9728: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } +module.exports = isexe +isexe.sync = sync - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } +var fs = __webpack_require__(5747) + +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) +} +function sync (path, options) { + return checkStat(fs.statSync(path), options) } -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} -/***/ }), +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid -/***/ 539: -/***/ (function(module) { + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? 
+ options.gid : process.getgid && process.getgid() -"use strict"; + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g -module.exports = opts => { - opts = opts || {}; + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 - const env = opts.env || process.env; - const platform = opts.platform || process.platform; + return ret +} - if (platform !== 'win32') { - return 'PATH'; - } - return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; -}; +/***/ }), +/***/ 2001: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -/***/ }), +module.exports = isexe +isexe.sync = sync -/***/ 549: -/***/ (function(module) { +var fs = __webpack_require__(5747) -module.exports = addHook +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT -function addHook (state, kind, name, hook) { - var orig = hook - if (!state.registry[name]) { - state.registry[name] = [] + if (!pathext) { + return true } - if (kind === 'before') { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true } - - if (kind === 'after') { - hook = function (method, options) { - var result - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_ - return orig(result, options) - }) - .then(function () { - return result - }) + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true } } + return false +} - if (kind === 'error') { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options) - }) - } +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false } + return checkPathExt(path, options) +} - state.registry[name].push({ - hook: hook, - orig: orig +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? 
false : checkStat(stat, path, options)) }) } +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} + /***/ }), -/***/ 554: -/***/ (function(module) { +/***/ 7493: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; +const os = __webpack_require__(2087); -var isStream = module.exports = function (stream) { - return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; -}; +const nameMap = new Map([ + [20, ['Big Sur', '11']], + [19, ['Catalina', '10.15']], + [18, ['Mojave', '10.14']], + [17, ['High Sierra', '10.13']], + [16, ['Sierra', '10.12']], + [15, ['El Capitan', '10.11']], + [14, ['Yosemite', '10.10']], + [13, ['Mavericks', '10.9']], + [12, ['Mountain Lion', '10.8']], + [11, ['Lion', '10.7']], + [10, ['Snow Leopard', '10.6']], + [9, ['Leopard', '10.5']], + [8, ['Tiger', '10.4']], + [7, ['Panther', '10.3']], + [6, ['Jaguar', '10.2']], + [5, ['Puma', '10.1']] +]); -isStream.writable = function (stream) { - return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; -}; +const macosRelease = release => { + release = Number((release || os.release()).split('.')[0]); -isStream.readable = function (stream) { - return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; -}; + const [name, version] = nameMap.get(release); -isStream.duplex = function (stream) { - return isStream.writable(stream) && isStream.readable(stream); + return { + name, + version + }; }; -isStream.transform = function (stream) { - return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; -}; +module.exports = macosRelease; +// TODO: remove this in the next major version +module.exports.default = macosRelease; /***/ }), -/***/ 560: -/***/ (function(module) { +/***/ 8560: +/***/ ((module) => { "use strict"; @@ -6146,3344 +5097,4240 @@ module.exports = function(fn) { /***/ }), -/***/ 585: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 467: +/***/ ((module, exports, __webpack_require__) => { "use strict"; -const {PassThrough} = __webpack_require__(413); -module.exports = options => { - options = Object.assign({}, options); +Object.defineProperty(exports, "__esModule", ({ value: true })); - const {array} = options; - let {encoding} = options; - const buffer = encoding === 'buffer'; - let objectMode = false; +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } - if (array) { - objectMode = !(encoding || buffer); - } else { - encoding = encoding || 'utf8'; - } +var Stream = _interopDefault(__webpack_require__(2413)); +var http = _interopDefault(__webpack_require__(8605)); +var Url = _interopDefault(__webpack_require__(8835)); +var https = _interopDefault(__webpack_require__(7211)); +var zlib = _interopDefault(__webpack_require__(8761)); - if (buffer) { - encoding = null; - } +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - let len = 0; - const ret = []; - const stream = new PassThrough({objectMode}); +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; - if (encoding) { - stream.setEncoding(encoding); - } +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); - stream.on('data', chunk => { - ret.push(chunk); +class Blob { + constructor() { + this[TYPE] = ''; - if (objectMode) { - len = ret.length; - } else { - len += chunk.length; + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } } - }); - stream.getBufferedValue = () => { - if (array) { - return ret; + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); } + const span = Math.max(relativeEnd - relativeStart, 0); - return buffer ? 
Buffer.concat(ret, len) : ret.join(''); - }; - - stream.getBufferedLength = () => len; - - return stream; -}; - - -/***/ }), - -/***/ 605: -/***/ (function(module) { - -module.exports = require("http"); + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} -/***/ }), +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); -/***/ 614: -/***/ (function(module) { +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); -module.exports = require("events"); +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ -/***/ }), +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); -/***/ 622: -/***/ (function(module) { + this.message = message; + this.type = type; -module.exports = require("path"); + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } -/***/ }), + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} -/***/ 631: -/***/ (function(module) { +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; -module.exports = require("net"); +let convert; +try { + convert = __webpack_require__(2877).convert; +} catch (e) {} -/***/ }), +const INTERNALS = Symbol('Body internals'); -/***/ 638: -/***/ (function(module) { +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; -"use strict"; +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; -module.exports = /^#!.*/; + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; -/***/ }), + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; -/***/ 668: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} -"use strict"; +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + get bodyUsed() { + return this[INTERNALS].disturbed; + }, -Object.defineProperty(exports, '__esModule', { value: true }); + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, -var request = __webpack_require__(234); -var universalUserAgent = __webpack_require__(429); + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, -const VERSION = "4.5.1"; + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; -class GraphqlError extends Error { - constructor(request, response) { - const message = response.data.errors[0].message; - super(message); - Object.assign(this, response.data); - this.name = "GraphqlError"; - this.request = request; // Maintains proper stack trace (only available on V8) + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, - /* istanbul ignore next */ + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - } + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, -} + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; -const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"]; -function graphql(request, query, options) { - options = typeof query === "string" ? 
options = Object.assign({ - query - }, options) : options = query; - const requestOptions = Object.keys(options).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = options[key]; - return result; - } + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; - if (!result.variables) { - result.variables = {}; - } +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); - result.variables[key] = options[key]; - return result; - }, {}); - return request(requestOptions).then(response => { - if (response.data.errors) { - throw new GraphqlError(requestOptions, { - data: response.data - }); - } +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; - return response.data.data; - }); -} +/** + * Consume and convert an entire Body to a Buffer. + * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; -function withDefaults(request$1, newDefaults) { - const newRequest = request$1.defaults(newDefaults); + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; + this[INTERNALS].disturbed = true; - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: request.request.endpoint - }); -} + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } -const graphql$1 = withDefaults(request.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} + let body = this.body; -exports.graphql = graphql$1; -exports.withCustomRequest = withCustomRequest; -//# sourceMappingURL=index.js.map + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + // body is blob + if (isBlob(body)) { + body = body.stream(); + } -/***/ }), + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } -/***/ 669: -/***/ (function(module) { + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } -module.exports = require("util"); + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; -/***/ }), + return new Body.Promise(function (resolve, reject) { + let resTimeout; -/***/ 670: -/***/ (function(module) { + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } -module.exports = register + // handle stream errors + body.on('error', function (err) 
{ + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } - if (!options) { - options = {} - } + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } + accumBytes += chunk.length; + accum.push(chunk); + }); - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } + body.on('end', function () { + if (abort) { + return; + } - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); } +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } -/***/ }), + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; -/***/ 682: -/***/ (function(module, __unusedexports, __webpack_require__) { + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } -var register = __webpack_require__(670) -var addHook = __webpack_require__(549) -var removeHook = __webpack_require__(819) + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) + // html5 + if (!res && str) { + res = / errname(uv, code); } -// Used for testing the fallback behavior -module.exports.__test__ = errname; +/** + * Write a Body to a Node.js WritableStream (e.g. http.Request) object. 
+ * + * @param Body instance Instance of Body + * @return Void + */ +function writeToStream(dest, instance) { + const body = instance.body; -function errname(uv, code) { - if (uv) { - return uv.errname(code); - } - if (!(code < 0)) { - throw new Error('err >= 0'); + if (body === null) { + // body is null + dest.end(); + } else if (isBlob(body)) { + body.stream().pipe(dest); + } else if (Buffer.isBuffer(body)) { + // body is buffer + dest.write(body); + dest.end(); + } else { + // body is stream + body.pipe(dest); } - - return `Unknown system error ${code}`; } +// expose Promise +Body.Promise = global.Promise; +/** + * headers.js + * + * Headers class offers convenient helpers + */ -/***/ }), - -/***/ 710: -/***/ (function(module) { - -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] +const invalidTokenRegex = /[^\^_`a-zA-Z\-0-9!#$%&'*+.|~]/; +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/; -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) +function validateName(name) { + name = `${name}`; + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`); + } } -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) +function validateValue(value) { + value = `${value}`; + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`); + } } +/** + * Find the key in the map object given a header name. + * + * Returns undefined if not found. + * + * @param String name Header name + * @return String|Undefined + */ +function find(map, name) { + name = name.toLowerCase(); + for (const key in map) { + if (key.toLowerCase() === name) { + return key; + } + } + return undefined; +} -/***/ }), - -/***/ 728: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = isexe -isexe.sync = sync - -var fs = __webpack_require__(747) +const MAP = Symbol('map'); +class Headers { + /** + * Headers class + * + * @param Object headers Response headers + * @return Void + */ + constructor() { + let init = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : undefined; -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? 
false : checkStat(stat, options)) - }) -} + this[MAP] = Object.create(null); -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid + return; + } - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } - return ret -} + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + return this[MAP][key].join(', '); + } -/***/ }), + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; -/***/ 746: -/***/ (function(module, __unusedexports, __webpack_require__) { + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; -"use strict"; + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
key : name] = [value]; + } -const cp = __webpack_require__(129); -const parse = __webpack_require__(855); -const enoent = __webpack_require__(101); + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } - return spawned; -} + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); - return result; -} +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; -module.exports._parse = parse; -module.exports._enoent = enoent; + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} +const INTERNAL = Symbol('internal'); -/***/ }), +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} -/***/ 747: -/***/ (function(module) { +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } -module.exports = require("fs"); + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; -/***/ }), + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } -/***/ 761: -/***/ (function(module) { + this[INTERNAL].index = index + 1; -module.exports = require("zlib"); + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); -/***/ }), +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); -/***/ 762: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); -"use strict"; + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + return obj; +} -Object.defineProperty(exports, '__esModule', { value: true }); +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. 
+ * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} -var universalUserAgent = __webpack_require__(429); -var beforeAfterHook = __webpack_require__(682); -var request = __webpack_require__(234); -var graphql = __webpack_require__(668); -var authToken = __webpack_require__(334); +const INTERNALS$1 = Symbol('Response internals'); -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; - return obj; -} +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); + Body.call(this, body, opts); - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - if (enumerableOnly) symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - keys.push.apply(keys, symbols); - } + const status = opts.status || 200; + const headers = new Headers(opts.headers); - return keys; -} + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? 
arguments[i] : {}; + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } + get url() { + return this[INTERNALS$1].url || ''; + } - return target; -} + get status() { + return this[INTERNALS$1].status; + } -const VERSION = "3.1.0"; + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set + get redirected() { + return this[INTERNALS$1].counter > 0; + } - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + get statusText() { + return this[INTERNALS$1].statusText; + } - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } + get headers() { + return this[INTERNALS$1].headers; + } - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } +Body.mixIn(Response.prototype); - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { - baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") - })); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. 
+Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const auth = options.authStrategy(Object.assign({ - request: this.request - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ +const INTERNALS$2 = Symbol('Request internals'); - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} - if (typeof defaults === "function") { - super(defaults(options)); - return; - } +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } + let parsedURL; - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); - static plugin(...newPlugins) { - var _a; + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } - const currentPlugins = this.plugins; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map + const headers = new Headers(init.headers || input.headers || {}); + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } -/***/ }), + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; -/***/ 766: -/***/ (function(module, __unusedexports, __webpack_require__) { + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } -"use strict"; + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; -const pump = __webpack_require__(341); -const bufferStream = __webpack_require__(585); + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; + get method() { + return this[INTERNALS$2].method; } -} -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); + get url() { + return format_url(this[INTERNALS$2].parsedURL); } - options = Object.assign({maxBuffer: Infinity}, options); + get headers() { + return this[INTERNALS$2].headers; + } - const {maxBuffer} = options; + get redirect() { + return this[INTERNALS$2].redirect; + } - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = stream.getBufferedValue(); - } - reject(error); - }; + get signal() { + return this[INTERNALS$2].signal; + } - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} - resolve(); - }); +Body.mixIn(Request.prototype); - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }).then(() => stream.getBufferedValue()); -} +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); -module.exports.MaxBufferError = MaxBufferError; +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); +/** + * Convert a Request to Node.js http request options. + * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); -/***/ }), + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } -/***/ 774: -/***/ (function(module) { + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } -"use strict"; + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } -module.exports = function (x) { - var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); - var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt(); + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } - if (x[x.length - 1] === lf) { - x = x.slice(0, x.length - 1); + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); } - if (x[x.length - 1] === cr) { - x = x.slice(0, x.length - 1); + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); } - return x; -}; + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } -/***/ }), + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } -/***/ 819: -/***/ (function(module) { + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js -module.exports = removeHook + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} -function removeHook (state, name, method) { - if (!state.registry[name]) { - return - } +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ - var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); - if (index === -1) { - return - } + this.type = 'aborted'; + this.message = message; - state.registry[name].splice(index, 1) + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); } +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; -/***/ }), - -/***/ 824: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; -const os = __webpack_require__(87); -const macosRelease = __webpack_require__(493); -const winRelease = __webpack_require__(515); +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { -const osName = (platform, release) => { - if (!platform && release) { - throw new Error('You can\'t specify a `release` without specifying `platform`'); + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); } - platform = platform || os.platform(); - - let id; + Body.Promise = fetch.Promise; - if (platform === 'darwin') { - if (!release && os.platform() === 'darwin') { - release = os.release(); - } + // wrap http.request into fetch + 
return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); - const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; - id = release ? macosRelease(release).name : ''; - return prefix + (id ? ' ' + id : ''); - } + const send = (options.protocol === 'https:' ? https : http).request; + const signal = request.signal; - if (platform === 'linux') { - if (!release && os.platform() === 'linux') { - release = os.release(); - } + let response = null; - id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; - return 'Linux' + (id ? ' ' + id : ''); - } + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; - if (platform === 'win32') { - if (!release && os.platform() === 'win32') { - release = os.release(); + if (signal && signal.aborted) { + abort(); + return; } - id = release ? winRelease(release) : ''; - return 'Windows' + (id ? ' ' + id : ''); - } - - return platform; -}; - -module.exports = osName; - - -/***/ }), - -/***/ 835: -/***/ (function(module) { - -module.exports = require("url"); - -/***/ }), - -/***/ 840: -/***/ (function(module) { + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; -"use strict"; + // send request + const req = send(options); + let reqTimeout; + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } -/*! - * isobject - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } -function isObject(val) { - return val != null && typeof val === 'object' && Array.isArray(val) === false; -} + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); -function isObjectObject(o) { - return isObject(o) === true - && Object.prototype.toString.call(o) === '[object Object]'; -} + req.on('response', function (res) { + clearTimeout(reqTimeout); -function isPlainObject(o) { - var ctor,prot; + const headers = createHeadersLenient(res.headers); - if (isObjectObject(o) === false) return false; + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); - // If has modified constructor - ctor = o.constructor; - if (typeof ctor !== 'function') return false; + // HTTP fetch step 5.3 + const locationURL = location === null ? 
null : resolve_url(request.url, location); - // If has modified prototype - prot = ctor.prototype; - if (isObjectObject(prot) === false) return false; + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } - // Most likely a plain Object - return true; -} + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; -module.exports = isPlainObject; + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } -/***/ }), + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } -/***/ 855: -/***/ (function(module, __unusedexports, __webpack_require__) { + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); -"use strict"; + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); -const path = __webpack_require__(622); -const niceTry = __webpack_require__(560); -const resolveCommand = __webpack_require__(274); -const escape = __webpack_require__(48); -const readShebang = __webpack_require__(252); -const semver = __webpack_require__(911); + // HTTP-network fetch step 12.1.1.4: handle content codings -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } -// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 -const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; -function detectShebang(parsed) { - parsed.file = resolveCommand(parsed); + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } - const shebang = parsed.file && readShebang(parsed.file); + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } - return resolveCommand(parsed); - } + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); - return parsed.file; + writeToStream(req, request); + }); } +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } - - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); - - // We don't need a shell if the command filename is an executable - const needsShell = !isExecutableRegExp.test(commandFile); - - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to double escape them - const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); +// expose Promise +fetch.Promise = global.Promise; - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = 
path.normalize(parsed.command); +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - const shellCommand = [parsed.command].concat(parsed.args).join(' '); +/***/ }), - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } +/***/ 502: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { - return parsed; -} +"use strict"; -function parseShell(parsed) { - // If node supports the shell option, there's no need to mimic its behavior - if (supportsShellOption) { - return parsed; - } +const path = __webpack_require__(5622); +const pathKey = __webpack_require__(539); - // Mimic node shell option - // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 - const shellCommand = [parsed.command].concat(parsed.args).join(' '); +module.exports = opts => { + opts = Object.assign({ + cwd: process.cwd(), + path: process.env[pathKey()] + }, opts); - if (isWin) { - parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } else { - if (typeof parsed.options.shell === 'string') { - parsed.command = parsed.options.shell; - } else if (process.platform === 'android') { - parsed.command = '/system/bin/sh'; - } else { - parsed.command = '/bin/sh'; - } + let prev; + let pth = path.resolve(opts.cwd); + const ret = []; - parsed.args = ['-c', shellCommand]; - } + while (prev !== pth) { + ret.push(path.join(pth, 'node_modules/.bin')); + prev = pth; + pth = path.resolve(pth, '..'); + } - return parsed; -} + // ensure the running `node` binary is used + ret.push(path.dirname(process.execPath)); -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } + return ret.concat(opts.path).join(path.delimiter); +}; - args = args ? args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original +module.exports.env = opts => { + opts = Object.assign({ + env: process.env + }, opts); - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; + const env = Object.assign({}, opts.env); + const path = pathKey({env}); - // Delegate further parsing to shell or non-shell - return options.shell ? 
parseShell(parsed) : parseNonShell(parsed); -} + opts.path = env[path]; + env[path] = module.exports(opts); -module.exports = parse; + return env; +}; /***/ }), -/***/ 877: -/***/ (function(module) { - -module.exports = eval("require")("encoding"); - +/***/ 1223: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -/***/ }), +var wrappy = __webpack_require__(2940) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) -/***/ 911: -/***/ (function(module, exports) { +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) -exports = module.exports = SemVer + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) } -} else { - debug = function () {} + f.called = false + return f } -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. +/***/ }), -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. +/***/ 4824: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' +"use strict"; -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. +const os = __webpack_require__(2087); +const macosRelease = __webpack_require__(7493); +const winRelease = __webpack_require__(3515); -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' +const osName = (platform, release) => { + if (!platform && release) { + throw new Error('You can\'t specify a `release` without specifying `platform`'); + } -// ## Main Version -// Three dot-separated numeric identifiers. + platform = platform || os.platform(); -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' + let id; -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' 
+ - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' + if (platform === 'darwin') { + if (!release && os.platform() === 'darwin') { + release = os.release(); + } -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. + const prefix = release ? (Number(release.split('.')[0]) > 15 ? 'macOS' : 'OS X') : 'macOS'; + id = release ? macosRelease(release).name : ''; + return prefix + (id ? ' ' + id : ''); + } -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' + if (platform === 'linux') { + if (!release && os.platform() === 'linux') { + release = os.release(); + } -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' + id = release ? release.replace(/^(\d+\.\d+).*/, '$1') : ''; + return 'Linux' + (id ? ' ' + id : ''); + } -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. + if (platform === 'win32') { + if (!release && os.platform() === 'win32') { + release = os.release(); + } -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' + id = release ? winRelease(release) : ''; + return 'Windows' + (id ? ' ' + id : ''); + } -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' + return platform; +}; -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. +module.exports = osName; -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. +/***/ }), -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' +/***/ 1330: +/***/ ((module) => { -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. +"use strict"; -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. +module.exports = (promise, onFinally) => { + onFinally = onFinally || (() => {}); -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' + return promise.then( + val => new Promise(resolve => { + resolve(onFinally()); + }).then(() => val), + err => new Promise(resolve => { + resolve(onFinally()); + }).then(() => { + throw err; + }) + ); +}; -src[FULL] = '^' + FULLPLAIN + '$' -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' +/***/ }), -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' +/***/ 539: +/***/ ((module) => { -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' +"use strict"; -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. 
-var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' +module.exports = opts => { + opts = opts || {}; -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' + const env = opts.env || process.env; + const platform = opts.platform || process.platform; -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' + if (platform !== 'win32') { + return 'PATH'; + } -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' + return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; +}; -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' +/***/ }), -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' +/***/ 8341: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' +var once = __webpack_require__(1223) +var eos = __webpack_require__(1205) +var fs = __webpack_require__(5747) // we only need fs to get the ReadStream and WriteStream prototypes -// Caret ranges. 
-// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' +var isFn = function (fn) { + return typeof fn === 'function' +} -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' + var closed = false + stream.on('close', function () { + closed = true + }) -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true -// Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. 
-for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) } } -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } +var call = function (fn) { + fn() +} - if (version instanceof SemVer) { - return version - } +var pipe = function (from, to) { + return from.pipe(to) +} - if (typeof version !== 'string') { - return null - } +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - if (version.length > MAX_LENGTH) { - return null - } + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') - var r = options.loose ? re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } + var error + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) - try { - return new SemVer(version, options) - } catch (er) { - return null - } + return streams.reduce(pipe) } -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} +module.exports = pump -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} -exports.SemVer = SemVer +/***/ }), -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } +/***/ 5911: +/***/ ((module, exports) => { - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } +exports = module.exports = SemVer - if (!(this instanceof SemVer)) { - return new SemVer(version, options) +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) } +} else { + debug = function () {} +} - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' - var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } +// Max safe segment length for coercion. 
+var MAX_SAFE_COMPONENT_LENGTH = 16 - this.raw = version +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - this.build = m[5] ? m[5].split('.') : [] - this.format() -} +// ## Main Version +// Three dot-separated numeric identifiers. -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' + this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')' -SemVer.prototype.toString = function () { - return this.version -} +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. - return this.compareMain(other) || this.comparePre(other) -} +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' 
+ src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. - // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
- case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } +src[FULL] = '^' + FULLPLAIN + '$' - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } -} +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' -exports.compareIdentifiers = compareIdentifiers +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' - if (anum && bnum) { - a = +a - b = +b - } +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? -1 - : 1 -} +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' 
-exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) -} +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
+var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + } } -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - case '': - case '=': - case '==': - return eq(a, b, loose) + if (version instanceof SemVer) { + return version + } - case '!=': - return neq(a, b, loose) + if (typeof version !== 'string') { + return null + } - case '>': - return gt(a, b, loose) + if (version.length > MAX_LENGTH) { + return null + } - case '>=': - return gte(a, b, loose) + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null + } - case '<': - return lt(a, b, loose) + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} - case '<=': - return lte(a, b, loose) +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} - default: - throw new TypeError('Invalid operator: ' + op) - } +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null } -exports.Comparator = Comparator -function Comparator (comp, options) { +exports.SemVer = SemVer + +function SemVer (version, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version } else { - comp = comp.value + version = version.version } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) } - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') } - debug('comparator', comp, options) + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) this.options = options this.loose = !!options.loose - this.parse(comp) - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version + var m = version.trim().match(options.loose ? 
re[LOOSE] : re[FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) } - debug('comp', this) -} + this.raw = version -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') } - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') } - // if it literally is just '>' or '' then allow anything. - if (!m[2]) { - this.semver = ANY + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] } else { - this.semver = new SemVer(m[2], this.options.loose) + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) } + + this.build = m[5] ? m[5].split('.') : [] + this.format() } -Comparator.prototype.toString = function () { - return this.value +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' + this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version } -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) +SemVer.prototype.toString = function () { + return this.version +} - if (this.semver === ANY) { - return true +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) } - if (typeof version === 'string') { - version = new SemVer(version, this.options) + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) } - return cmp(version, this.operator, this.semver, this.options) + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) } -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) } - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 } - var rangeTmp + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } 
while (++i) +} - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. 
+ case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this } -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined } - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined } +} - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease +exports.compareIdentifiers = compareIdentifiers - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) + if (anum && bnum) { + a = +a + b = +b } - this.format() + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 } -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) } -Range.prototype.toString = function () { - return this.range +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major } -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} - // At this point, the range is completely trimmed and - // ready to be split into comparators. +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} - return set +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) } -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) }) } -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) }) } -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. 
-function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 } -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 } -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 } -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} - debug('tilde return', ret) - return ret - }) +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 } -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } } -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? 
re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } + comp = comp.value } + } - debug('caret return', ret) - return ret - }) -} + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } - if (gtlt === '=' && anyX) { - gtlt = '' - } + debug('comp', this) +} - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } - ret = gtlt + M + '.' + m + '.' + p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' - } + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } - debug('xRange return', ret) + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} - return ret - }) +Comparator.prototype.toString = function () { + return this.value } -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! - return comp.trim().replace(re[STAR], '') +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY) { + return true + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + return cmp(version, this.operator, this.semver, this.options) } -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') } - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr - } else { - to = '<=' + to + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } } - return (from + ' ' + to).trim() + var rangeTmp + + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan } -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } } - if (typeof version === 'string') { - version = new SemVer(version, this.options) + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } } - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } + if (range instanceof Comparator) { + return new Range(range.value, options) } - return false -} -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } + if (!(this instanceof Range)) { + return new Range(range, options) } - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. 
- for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) - // Version has a -pre, but it's not one of the ones we like. - return false + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) } - return true + this.format() } -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range } -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max +Range.prototype.toString = function () { + return this.range } -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } + // normalize spaces + range = range.split(/\s+/).join(' ') - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] + // At this point, the range is completely trimmed and + // ready to be split into comparators. - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. 
- var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) }) } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) - if (minver && range.test(minver)) { - return minver + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') } - return null + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) } -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') } -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) +function replaceTilde (comp, options) { + var r = options.loose ? 
re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) } -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') } -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' + } + } - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } + debug('caret return', ret) + return ret + }) +} - // From now on, variable terms are as if we're in "gtr" mode. - // but note that everything is flipped for the "ltr" function. +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp - var high = null - var low = null + if (gtlt === '=' && anyX) { + gtlt = '' + } - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 } - }) + p = 0 - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } - } - return true -} -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} + debug('xRange return', ret) -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) + return ret + }) } -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], '') +} - if (typeof version !== 'string') { - return null +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' 
+ fm + '.0' + } else { + from = '>=' + from } - var match = version.match(re[COERCE]) - - if (match == null) { - return null + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to } - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0')) + return (from + ' ' + to).trim() } +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } -/***/ }), - -/***/ 914: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__webpack_require__(925)); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; + } + return false } -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map -/***/ }), - -/***/ 925: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } -"use strict"; + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } -Object.defineProperty(exports, "__esModule", { value: true }); -const url = __webpack_require__(835); -const http = __webpack_require__(605); -const https = __webpack_require__(211); -const pm = __webpack_require__(443); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(url.parse(serverUrl)); - return proxyUrl ? 
proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = url.parse(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - } - head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - } - sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. 
Other errors (4xx, 5xx) reject the promise - */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. 
- * Prefer get, del, post and patch - */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = url.parse(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. - return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. 
+ var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false } - /** - * Raw request. 
- * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + var match = version.match(re[COERCE]) + + if (match == null) { + return null + } + + return parse(match[1] + + '.' + (match[2] || '0') + + '.' + (match[3] || '0')) +} + + +/***/ }), + +/***/ 7032: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +"use strict"; + +var shebangRegex = __webpack_require__(2638); + +module.exports = function (str) { + var match = str.match(shebangRegex); + + if (!match) { + return null; + } + + var arr = match[0].replace(/#! ?/, '').split(' '); + var bin = arr[0].split('/').pop(); + var arg = arr[1]; + + return (bin === 'env' ? + arg : + bin + (arg ? ' ' + arg : '') + ); +}; + + +/***/ }), + +/***/ 2638: +/***/ ((module) => { + +"use strict"; + +module.exports = /^#!.*/; + + +/***/ }), + +/***/ 4931: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +var assert = __webpack_require__(2357) +var signals = __webpack_require__(3710) +var isWin = /^win/i.test(process.platform) + +var EE = __webpack_require__(8614) +/* istanbul ignore if */ +if (typeof EE !== 'function') { + EE = EE.EventEmitter +} + +var emitter +if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ +} else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} +} + +// Because this emitter is a global, we have to check to see if a +// previous version of this library failed to enable infinite listeners. +// I know what you're about to say. But literally everything about +// signal-exit is a compromise with evil. Get used to it. +if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true +} + +module.exports = function (cb, opts) { + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() } - /** - * Raw request with callback. 
- * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } + } + emitter.on(ev, cb) + + return remove +} + +module.exports.unload = unload +function unload () { + if (!loaded) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 +} + +function emit (event, code, signal) { + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) +} + +// { : , ... } +var sigListeners = {} +signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' + } + process.kill(process.pid, sig) } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = url.parse(serverUrl); - return this._getAgent(parsedUrl); + } +}) + +module.exports.signals = function () { + return signals +} + +module.exports.load = load + +var loaded = false + +function load () { + if (loaded) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. + emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? 
https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; + }) + + process.emit = processEmit + process.reallyExit = processReallyExit +} + +var originalProcessReallyExit = process.reallyExit +function processReallyExit (code) { + process.exitCode = code || 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) +} + +var originalProcessEmit = process.emit +function processEmit (ev, arg) { + if (ev === 'exit') { + if (arg !== undefined) { + process.exitCode = arg } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); + var ret = originalProcessEmit.apply(this, arguments) + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } +} + + +/***/ }), + +/***/ 3710: +/***/ ((module) => { + +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} + + +/***/ }), + +/***/ 5515: +/***/ ((module) => { + +"use strict"; + +module.exports = function (x) { + var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); + var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt(); + + if (x[x.length - 1] === lf) { + x = x.slice(0, x.length - 1); + } + + if (x[x.length - 1] === cr) { + x = x.slice(0, x.length - 1); + } + + return x; +}; + + +/***/ }), + +/***/ 4294: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +module.exports = __webpack_require__(4219); + + +/***/ }), + +/***/ 4219: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +var net = __webpack_require__(1631); +var tls = __webpack_require__(4016); +var http = __webpack_require__(8605); +var https = __webpack_require__(7211); +var events = __webpack_require__(8614); +var assert = __webpack_require__(2357); +var util = __webpack_require__(1669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (!!agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __webpack_require__(294); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - proxyAuth: proxyUrl.auth, - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. 
+ process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new Error(msg); - // attach statusCode and body obj (if available) to the error object - err['statusCode'] = statusCode; - if (response.result) { - err['result'] = response.result; - } - reject(err); - } - else { - resolve(response); - } - }); + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; } -} -exports.HttpClient = HttpClient; + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } + function onError(cause) { + connectReq.removeAllListeners(); -/***/ }), + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; -/***/ 931: -/***/ (function(module, __unusedexports, __webpack_require__) { +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. 
-var assert = __webpack_require__(357) -var signals = __webpack_require__(710) -var isWin = /^win/i.test(process.platform) + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; -var EE = __webpack_require__(614) -/* istanbul ignore if */ -if (typeof EE !== 'function') { - EE = EE.EventEmitter +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); } -var emitter -if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ -} else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later } -// Because this emitter is a global, we have to check to see if a -// previous version of this library failed to enable infinite listeners. -// I know what you're about to say. But literally everything about -// signal-exit is a compromise with evil. Get used to it. -if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; } -module.exports = function (cb, opts) { - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') - if (loaded === false) { - load() +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); } +} else { + debug = function() {}; +} +exports.debug = debug; // for test - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() +/***/ }), + +/***/ 5030: +/***/ ((__unused_webpack_module, exports, __webpack_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ value: true })); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var osName = _interopDefault(__webpack_require__(4824)); + +function getUserAgent() { + try { + return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; + } catch (error) { + if (/wmic os get Caption/.test(error.message)) { + return "Windows "; } + + return ""; } - emitter.on(ev, cb) +} - return remove +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 4207: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { + +module.exports = which +which.sync = whichSync + +var isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +var path = __webpack_require__(5622) +var COLON = isWindows ? ';' : ':' +var isexe = __webpack_require__(7126) + +function getNotFoundError (cmd) { + var er = new Error('not found: ' + cmd) + er.code = 'ENOENT' + + return er } -module.exports.unload = unload -function unload () { - if (!loaded) { - return +function getPathInfo (cmd, opt) { + var colon = opt.colon || COLON + var pathEnv = opt.path || process.env.PATH || '' + var pathExt = [''] + + pathEnv = pathEnv.split(colon) + + var pathExtExe = '' + if (isWindows) { + pathEnv.unshift(process.cwd()) + pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') + pathExt = pathExtExe.split(colon) + + + // Always test the cmd itself first. isexe will check to make sure + // it's found in the pathExt set. + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') } - loaded = false - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 -} + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) + pathEnv = [''] -function emit (event, code, signal) { - if (emitter.emitted[event]) { - return + return { + env: pathEnv, + ext: pathExt, + extExe: pathExtExe } - emitter.emitted[event] = true - emitter.emit(event, code, signal) } -// { : , ... } -var sigListeners = {} -signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - // If there are no other listeners, an exit is coming! - // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. 
- var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' - } - process.kill(process.pid, sig) - } +function which (cmd, opt, cb) { + if (typeof opt === 'function') { + cb = opt + opt = {} } -}) -module.exports.signals = function () { - return signals + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] + + ;(function F (i, l) { + if (i === l) { + if (opt.all && found.length) + return cb(null, found) + else + return cb(getNotFoundError(cmd)) + } + + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) + + var p = path.join(pathPart, cmd) + if (!pathPart && (/^\.[\\\/]/).test(cmd)) { + p = cmd.slice(0, 2) + p + } + ;(function E (ii, ll) { + if (ii === ll) return F(i + 1, l) + var ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return cb(null, p + ext) + } + return E(ii + 1, ll) + }) + })(0, pathExt.length) + })(0, pathEnv.length) } -module.exports.load = load - -var loaded = false +function whichSync (cmd, opt) { + opt = opt || {} -function load () { - if (loaded) { - return - } - loaded = true + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. 
- emitter.count += 1 + for (var i = 0, l = pathEnv.length; i < l; i ++) { + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false + var p = path.join(pathPart, cmd) + if (!pathPart && /^\.[\\\/]/.test(cmd)) { + p = cmd.slice(0, 2) + p } - }) + for (var j = 0, ll = pathExt.length; j < ll; j ++) { + var cur = p + pathExt[j] + var is + try { + is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } + } - process.emit = processEmit - process.reallyExit = processReallyExit -} + if (opt.all && found.length) + return found -var originalProcessReallyExit = process.reallyExit -function processReallyExit (code) { - process.exitCode = code || 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) -} + if (opt.nothrow) + return null -var originalProcessEmit = process.emit -function processEmit (ev, arg) { - if (ev === 'exit') { - if (arg !== undefined) { - process.exitCode = arg - } - var ret = originalProcessEmit.apply(this, arguments) - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - return ret - } else { - return originalProcessEmit.apply(this, arguments) - } + throw getNotFoundError(cmd) } /***/ }), -/***/ 932: -/***/ (function(__unusedmodule, exports) { +/***/ 3515: +/***/ ((module, __unused_webpack_exports, __webpack_require__) => { "use strict"; +const os = __webpack_require__(2087); +const execa = __webpack_require__(5447); -Object.defineProperty(exports, '__esModule', { value: true }); +// Reference: https://www.gaijin.at/en/lstwinver.php +const names = new Map([ + ['10.0', '10'], + ['6.3', '8.1'], + ['6.2', '8'], + ['6.1', '7'], + ['6.0', 'Vista'], + ['5.2', 'Server 2003'], + ['5.1', 'XP'], + ['5.0', '2000'], + ['4.9', 'ME'], + ['4.1', '98'], + ['4.0', '95'] +]); -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) +const windowsRelease = release => { + const version = /\d+\.\d/.exec(release || os.release()); - /* istanbul ignore next */ + if (release && !version) { + throw new Error('`release` argument doesn\'t match `n.n`'); + } - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + const ver = (version || [])[0]; - this.name = 'Deprecation'; - } + // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime. + // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version + // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx + // If `wmic` is obsoloete (later versions of Windows 10), use PowerShell instead. + // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name. 
+ if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) { + let stdout; + try { + stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || ''; + } catch (_) { + stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || ''; + } -} + const year = (stdout.match(/2008|2012|2016|2019/) || [])[0]; -exports.Deprecation = Deprecation; + if (year) { + return `Server ${year}`; + } + } + + return names.get(ver); +}; + +module.exports = windowsRelease; /***/ }), -/***/ 940: -/***/ (function(module) { +/***/ 2940: +/***/ ((module) => { // Returns a wrapper function that returns a wrapped callback // The wrapper function should do some stuff, and return a @@ -9520,6 +9367,166 @@ function wrappy (fn, cb) { } +/***/ }), + +/***/ 2877: +/***/ ((module) => { + +module.exports = eval("require")("encoding"); + + +/***/ }), + +/***/ 2357: +/***/ ((module) => { + +"use strict"; +module.exports = require("assert"); + +/***/ }), + +/***/ 3129: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); + +/***/ }), + +/***/ 8614: +/***/ ((module) => { + +"use strict"; +module.exports = require("events"); + +/***/ }), + +/***/ 5747: +/***/ ((module) => { + +"use strict"; +module.exports = require("fs"); + +/***/ }), + +/***/ 8605: +/***/ ((module) => { + +"use strict"; +module.exports = require("http"); + +/***/ }), + +/***/ 7211: +/***/ ((module) => { + +"use strict"; +module.exports = require("https"); + +/***/ }), + +/***/ 1631: +/***/ ((module) => { + +"use strict"; +module.exports = require("net"); + +/***/ }), + +/***/ 2087: +/***/ ((module) => { + +"use strict"; +module.exports = require("os"); + +/***/ }), + +/***/ 5622: +/***/ ((module) => { + +"use strict"; +module.exports = require("path"); + +/***/ }), + +/***/ 2413: +/***/ ((module) => { + +"use strict"; +module.exports = require("stream"); + +/***/ }), + +/***/ 4016: +/***/ ((module) => { + +"use strict"; +module.exports = require("tls"); + +/***/ }), + +/***/ 8835: +/***/ ((module) => { + +"use strict"; +module.exports = require("url"); + +/***/ }), + +/***/ 1669: +/***/ ((module) => { + +"use strict"; +module.exports = require("util"); + +/***/ }), + +/***/ 8761: +/***/ ((module) => { + +"use strict"; +module.exports = require("zlib"); + /***/ }) -/******/ }); \ No newline at end of file +/******/ }); +/************************************************************************/ +/******/ // The module cache +/******/ var __webpack_module_cache__ = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ // Check if module is in cache +/******/ if(__webpack_module_cache__[moduleId]) { +/******/ return __webpack_module_cache__[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = __webpack_module_cache__[moduleId] = { +/******/ // no module.id needed +/******/ // no module.loaded needed +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ var threw = true; +/******/ try { +/******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ threw = false; +/******/ } finally { +/******/ if(threw) delete __webpack_module_cache__[moduleId]; +/******/ } +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/************************************************************************/ 
+/******/ /* webpack/runtime/compat */ +/******/ +/******/ __webpack_require__.ab = __dirname + "/";/************************************************************************/ +/******/ // module exports must be returned from runtime so entry inlining is disabled +/******/ // startup +/******/ // Load entry module and return exports +/******/ return __webpack_require__(3109); +/******/ })() +; \ No newline at end of file